# nopaque/app/daemon/job_utils.py
from datetime import datetime
from flask import current_app
from werkzeug.utils import secure_filename
from .. import db
from ..models import Job, JobResult, TesseractOCRModel
import docker
import json
import os
import shutil
class CheckJobsMixin:
    """Drive nopaque jobs through their lifecycle as Docker Swarm services.

    The host class must provide ``self.docker``, a Docker client connected
    to a Swarm manager. Job state changes are made on the shared
    ``db.session``; committing is left to the caller of ``check_jobs``.
    """

    def check_jobs(self):
        """Advance every job one step according to its current status."""
        jobs = Job.query.all()
        # 'submitted' jobs get a fresh Docker service.
        for job in (x for x in jobs if x.status == 'submitted'):
            self.create_job_service(job)
        # 'queued'/'running' jobs are polled for state transitions.
        for job in (x for x in jobs if x.status in ['queued', 'running']):
            self.checkout_job_service(job)
        # 'canceling' jobs get their service torn down.
        for job in (x for x in jobs if x.status == 'canceling'):
            self.remove_job_service(job)

    def create_job_service(self, job):
        """Create the Docker Swarm service that executes ``job``.

        On success ``job.status`` becomes 'queued'. On an unusable
        configuration (unknown service, missing Tesseract model) the job is
        set to 'failed'; on a transient Docker API error the status is left
        unchanged so the next ``check_jobs`` tick retries.
        """
        ''' # Docker service settings # '''
        ''' ## Service specific settings ## '''
        if job.service == 'file-setup':
            mem_mb = 512
            n_cores = 2
            executable = 'file-setup'
            image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}file-setup:v{job.service_version}'  # noqa
        elif job.service == 'tesseract-ocr':
            mem_mb = 2048
            n_cores = 4
            executable = 'ocr'
            image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}ocr:v{job.service_version}'  # noqa
        elif job.service == 'spacy-nlp':
            mem_mb = 1024
            n_cores = 1
            executable = 'nlp'
            image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}nlp:v{job.service_version}'  # noqa
        else:
            # An unknown service previously fell through and raised a
            # NameError on the variables above; fail the job explicitly.
            current_app.logger.error(
                f'Unknown service "{job.service}" for job {job.id}'
            )
            job.status = 'failed'
            return
        ''' ## Command ## '''
        command = f'{executable} -i /input -o /output'
        command += ' --log-dir /logs'
        command += f' --mem-mb {mem_mb}'
        command += f' --n-cores {n_cores}'
        service_args = json.loads(job.service_args)
        if job.service == 'spacy-nlp':
            command += f' -m {service_args["model"]}'
            if 'encoding_detection' in service_args and service_args['encoding_detection']:  # noqa
                command += ' --check-encoding'
        elif job.service == 'tesseract-ocr':
            command += f' -m {service_args["model"]}'
            if 'binarization' in service_args and service_args['binarization']:
                command += ' --binarize'
        ''' ## Constraints ## '''
        constraints = ['node.role==worker']
        ''' ## Labels ## '''
        labels = {
            'origin': current_app.config['SERVER_NAME'],
            'type': 'job',
            'job_id': str(job.id)
        }
        ''' ## Mounts ## '''
        mounts = []
        ''' ### Input mount(s) ### '''
        input_mount_target_base = '/input'
        if job.service == 'file-setup':
            input_mount_target_base += f'/{secure_filename(job.title)}'
        for job_input in job.inputs:
            input_mount_source = job_input.path
            # The base already starts with '/'; the old code prefixed
            # another slash and produced '//input/...' targets.
            input_mount_target = f'{input_mount_target_base}/{job_input.filename}'  # noqa
            input_mount = f'{input_mount_source}:{input_mount_target}:ro'
            mounts.append(input_mount)
        if job.service == 'tesseract-ocr':
            # Reuse the service args parsed above instead of re-parsing.
            model = TesseractOCRModel.query.get(service_args['model'])
            if model is None:
                job.status = 'failed'
                return
            models_mount_source = model.path
            models_mount_target = f'/usr/local/share/tessdata/{model.filename}'
            models_mount = f'{models_mount_source}:{models_mount_target}:ro'
            mounts.append(models_mount)
        ''' ### Output mount ### '''
        output_mount_source = os.path.join(job.path, 'results')
        output_mount_target = '/output'
        output_mount = f'{output_mount_source}:{output_mount_target}:rw'
        # Make sure that there is no data in the output directory
        shutil.rmtree(output_mount_source, ignore_errors=True)
        os.makedirs(output_mount_source)
        mounts.append(output_mount)
        ''' ### Pipeline data mount ### '''
        pyflow_data_mount_source = os.path.join(job.path, 'pipeline_data')
        pyflow_data_mount_target = '/logs/pyflow.data'
        pyflow_data_mount = f'{pyflow_data_mount_source}:{pyflow_data_mount_target}:rw'  # noqa
        # Make sure that there is no data in the pipeline data directory
        shutil.rmtree(pyflow_data_mount_source, ignore_errors=True)
        os.makedirs(pyflow_data_mount_source)
        mounts.append(pyflow_data_mount)
        ''' ## Name ## '''
        name = f'job_{job.id}'
        ''' ## Resources ## '''
        resources = docker.types.Resources(
            cpu_reservation=n_cores * (10 ** 9),
            mem_reservation=mem_mb * (10 ** 6)
        )
        ''' ## Restart policy ## '''
        restart_policy = docker.types.RestartPolicy()
        try:
            self.docker.services.create(
                image,
                command=command,
                constraints=constraints,
                labels=labels,
                mounts=mounts,
                name=name,
                resources=resources,
                restart_policy=restart_policy
            )
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Create service "{name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )
            return
        job.status = 'queued'

    def checkout_job_service(self, job):
        """Synchronize ``job.status`` with its Docker service's task state.

        Handles queued -> running, running -> complete (collecting result
        files listed in outputs.json) and running -> failed; removes the
        service once the job reaches a final state.
        """
        service_name = f'job_{job.id}'
        try:
            service = self.docker.services.get(service_name)
        except docker.errors.NotFound as e:
            current_app.logger.error(
                f'Get service "{service_name}" failed '
                f'due to "docker.errors.NotFound": {e}'
            )
            job.status = 'failed'
            return
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Get service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )
            return
        service_tasks = service.tasks()
        if not service_tasks:
            # The service has not spawned a task yet; check again next tick.
            return
        task_state = service_tasks[0].get('Status').get('State')
        if job.status == 'queued' and task_state != 'pending':
            job.status = 'running'
            return
        elif job.status == 'running' and task_state == 'complete':
            job.status = 'complete'
            results_dir = os.path.join(job.path, 'results')
            # NOTE(review): a missing or malformed outputs.json raises here
            # and aborts the transition — confirm this is intended.
            with open(os.path.join(results_dir, 'outputs.json')) as f:
                outputs = json.load(f)
            for output in outputs:
                filename = os.path.basename(output['file'])
                job_result = JobResult(
                    filename=filename,
                    job=job,
                    mimetype=output['mimetype']
                )
                if 'description' in output:
                    job_result.description = output['description']
                db.session.add(job_result)
                # Flush and refresh so job_result has its database identity
                # (used by job_result.path) before the file is moved.
                db.session.flush(objects=[job_result])
                db.session.refresh(job_result)
                os.rename(
                    os.path.join(results_dir, output['file']),
                    job_result.path
                )
        elif job.status == 'running' and task_state == 'failed':
            job.status = 'failed'
        else:
            # No transition applies; leave the job untouched.
            return
        job.end_date = datetime.utcnow()
        try:
            service.remove()
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Remove service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )

    def remove_job_service(self, job):
        """Tear down the Docker service of a job that is being canceled.

        The status only becomes 'canceled' once the service is gone
        (NotFound on a later tick); until then the job stays 'canceling'.
        """
        service_name = f'job_{job.id}'
        try:
            service = self.docker.services.get(service_name)
        except docker.errors.NotFound:
            # Service already gone: the cancelation is effectively done.
            job.status = 'canceled'
            return
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Get service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )
            return
        try:
            # Presumably clearing the mounts forces the task to stop before
            # removal — TODO confirm why this precedes service.remove().
            service.update(mounts=None)
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Update service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )
            return
        try:
            service.remove()
        except docker.errors.APIError as e:
            # Message wording aligned with checkout_job_service for
            # consistent log grepping.
            current_app.logger.error(
                f'Remove service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )