nopaque/app/daemon/job_utils.py

from app import db
from app.models import Job, JobResult, JobStatus, TesseractOCRModel
from datetime import datetime
from flask import current_app
from werkzeug.utils import secure_filename
import docker
import json
import os
import shutil


class CheckJobsMixin:
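    '''
    Mixin for the nopaque daemon that keeps Job database entries in sync
    with the Docker Swarm services executing them: submitted jobs get a
    service created, queued and running jobs are polled, and canceling
    jobs have their service removed.
    '''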

    def check_jobs(self):
        jobs = Job.query.all()
        for job in (x for x in jobs if x.status == JobStatus.SUBMITTED):
            self.create_job_service(job)
        for job in (x for x in jobs if x.status in [JobStatus.QUEUED, JobStatus.RUNNING]):  # noqa
            self.checkout_job_service(job)
        for job in (x for x in jobs if x.status == JobStatus.CANCELING):
            self.remove_job_service(job)

    def create_job_service(self, job):
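        '''
        Create a Docker Swarm service for a submitted job. Image, command
        and resource reservations depend on the requested nopaque service;
        on success the job is marked QUEUED.
        '''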
        ''' # Docker service settings # '''
        ''' ## Service specific settings ## '''
        if job.service == 'file-setup':
            mem_mb = 512
            n_cores = 2
            executable = 'file-setup'
            image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}file-setup:v{job.service_version}'  # noqa
        elif job.service == 'tesseract-ocr':
            mem_mb = 2048
            n_cores = 4
            executable = 'ocr'
            image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}ocr:v{job.service_version}'  # noqa
        elif job.service == 'spacy-nlp':
            mem_mb = 1024
            n_cores = 1
            executable = 'nlp'
            image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}nlp:v{job.service_version}'  # noqa
        ''' ## Command ## '''
        command = f'{executable} -i /input -o /output'
        command += ' --log-dir /logs'
        command += f' --mem-mb {mem_mb}'
        command += f' --n-cores {n_cores}'
        service_args = json.loads(job.service_args)
        if job.service == 'spacy-nlp':
            command += f' -m {service_args["model"]}'
            if 'encoding_detection' in service_args and service_args['encoding_detection']:  # noqa
                command += ' --check-encoding'
        elif job.service == 'tesseract-ocr':
            command += f' -m {service_args["model"]}'
            if 'binarization' in service_args and service_args['binarization']:
                command += ' --binarize'
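        # For a tesseract-ocr job with binarization enabled this yields e.g.:
        #   ocr -i /input -o /output --log-dir /logs --mem-mb 2048 \
        #       --n-cores 4 -m <model id> --binarize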
        ''' ## Constraints ## '''
        constraints = ['node.role==worker']
        ''' ## Labels ## '''
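        # The labels tie the service to this nopaque instance (SERVER_NAME)
        # and to the database row of the job it runs.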
        labels = {
            'origin': current_app.config['SERVER_NAME'],
            'type': 'job',
            'job_id': str(job.id)
        }
        ''' ## Mounts ## '''
        mounts = []
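        # Mounts are given in docker-py's string form 'source:target:mode';
        # inputs and models are mounted read-only, outputs read-write.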
        ''' ### Input mount(s) ### '''
        input_mount_target_base = '/input'
        if job.service == 'file-setup':
            input_mount_target_base += f'/{secure_filename(job.title)}'
        for job_input in job.inputs:
            input_mount_source = job_input.path
            input_mount_target = f'{input_mount_target_base}/{job_input.filename}'  # noqa
            input_mount = f'{input_mount_source}:{input_mount_target}:ro'
            mounts.append(input_mount)
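        # tesseract-ocr jobs additionally get the selected model file mounted
        # read-only into the container's tessdata directory, where Tesseract
        # looks traineddata files up by name.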
        if job.service == 'tesseract-ocr':
            service_args = json.loads(job.service_args)
            model = TesseractOCRModel.query.get(service_args['model'])
            if model is None:
                job.status = JobStatus.FAILED
                return
            models_mount_source = model.path
            models_mount_target = f'/usr/local/share/tessdata/{model.filename}'
            models_mount = f'{models_mount_source}:{models_mount_target}:ro'
            mounts.append(models_mount)
        ''' ### Output mount ### '''
        output_mount_source = os.path.join(job.path, 'results')
        output_mount_target = '/output'
        output_mount = f'{output_mount_source}:{output_mount_target}:rw'
        # Make sure that there is no data in the output directory
        shutil.rmtree(output_mount_source, ignore_errors=True)
        os.makedirs(output_mount_source)
        mounts.append(output_mount)
        ''' ### Pipeline data mount ### '''
        pyflow_data_mount_source = os.path.join(job.path, 'pipeline_data')
        pyflow_data_mount_target = '/logs/pyflow.data'
        pyflow_data_mount = f'{pyflow_data_mount_source}:{pyflow_data_mount_target}:rw'  # noqa
        # Make sure that there is no data in the pipeline data directory
        shutil.rmtree(pyflow_data_mount_source, ignore_errors=True)
        os.makedirs(pyflow_data_mount_source)
        mounts.append(pyflow_data_mount)
        ''' ## Name ## '''
        name = f'job_{job.id}'
        ''' ## Resources ## '''
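        # docker-py expects CPU values in nano-CPUs (1 core == 10 ** 9)
        # and memory in bytes, hence the conversions below.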
        resources = docker.types.Resources(
            cpu_reservation=n_cores * (10 ** 9),
            mem_reservation=mem_mb * (10 ** 6)
        )
        ''' ## Restart policy ## '''
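        # The default RestartPolicy uses condition 'none', so Swarm never
        # restarts a finished or failed task; state changes are picked up
        # by checkout_job_service instead.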
        restart_policy = docker.types.RestartPolicy()
        try:
            self.docker.services.create(
                image,
                command=command,
                constraints=constraints,
                labels=labels,
                mounts=mounts,
                name=name,
                resources=resources,
                restart_policy=restart_policy
            )
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Create service "{name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )
            return
        job.status = JobStatus.QUEUED

    def checkout_job_service(self, job):
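        '''
        Poll the Docker Swarm service behind a queued or running job.

        The first task's state drives the job status: anything past
        'pending' marks the job RUNNING, 'complete' marks it COMPLETED and
        registers the files listed in results/outputs.json as JobResult
        rows, and 'failed' marks it FAILED. Finished jobs get an end_date
        and their service is removed.
        '''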
        service_name = f'job_{job.id}'
        try:
            service = self.docker.services.get(service_name)
        except docker.errors.NotFound as e:
            current_app.logger.error(
                f'Get service "{service_name}" failed '
                f'due to "docker.errors.NotFound": {e}'
            )
            job.status = JobStatus.FAILED
            return
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Get service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )
            return
        service_tasks = service.tasks()
        if not service_tasks:
            return
        task_state = service_tasks[0].get('Status').get('State')
        if job.status == JobStatus.QUEUED and task_state != 'pending':
            job.status = JobStatus.RUNNING
            return
        elif job.status == JobStatus.RUNNING and task_state == 'complete':  # noqa
            job.status = JobStatus.COMPLETED
            results_dir = os.path.join(job.path, 'results')
            with open(os.path.join(results_dir, 'outputs.json')) as f:
                outputs = json.load(f)
            for output in outputs:
                filename = os.path.basename(output['file'])
                job_result = JobResult(
                    filename=filename,
                    job=job,
                    mimetype=output['mimetype']
                )
                if 'description' in output:
                    job_result.description = output['description']
                db.session.add(job_result)
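                # Flush and refresh so the database assigns this JobResult
                # an id before job_result.path (presumably derived from it)
                # is used as the rename target.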
                db.session.flush(objects=[job_result])
                db.session.refresh(job_result)
                os.rename(
                    os.path.join(results_dir, output['file']),
                    job_result.path
                )
        elif job.status == JobStatus.RUNNING and task_state == 'failed':
            job.status = JobStatus.FAILED
        else:
            return
        job.end_date = datetime.utcnow()
        try:
            service.remove()
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Remove service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )

    def remove_job_service(self, job):
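        '''
        Cancel a job by tearing down its Docker Swarm service.

        If the service is already gone, the job is marked CANCELED right
        away; otherwise the mounts are detached and the service removed,
        and the job becomes CANCELED on a later check_jobs pass once the
        service can no longer be found.
        '''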
        service_name = f'job_{job.id}'
        try:
            service = self.docker.services.get(service_name)
        except docker.errors.NotFound:
            job.status = JobStatus.CANCELED
            return
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Get service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )
            return
        try:
            service.update(mounts=None)
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Update service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )
            return
        try:
            service.remove()
        except docker.errors.APIError as e:
            current_app.logger.error(
                f'Remove service "{service_name}" failed '
                f'due to "docker.errors.APIError": {e}'
            )