Mirror of https://gitlab.ub.uni-bielefeld.de/sfb1288inf/nopaque.git (synced 2025-06-12 00:50:40 +00:00)
Big update, corpus analysis reworked, versioned services, preliminary work for contributions
@@ -17,11 +17,7 @@ class Daemon(CheckCorporaMixin, CheckJobsMixin):
def run(self):
while True:
try:
self.check_corpora()
self.check_jobs()
db.session.commit()
except Exception as e:
current_app.logger.warning(e)
pass
self.check_corpora()
self.check_jobs()
db.session.commit()
sleep(1.5)
@@ -26,37 +26,55 @@ class CheckCorporaMixin:
def create_build_corpus_service(self, corpus):
''' # Docker service settings # '''
''' ## Command ## '''
command = 'docker-entrypoint.sh build-corpus'
command = ['bash', '-c']
command.append(
f'mkdir /corpora/data/nopaque_{corpus.id}'
' && '
'cwb-encode'
' -c utf8'
f' -d /corpora/data/nopaque_{corpus.id}'
' -f /root/files/corpus.vrt'
f' -R /usr/local/share/cwb/registry/nopaque_{corpus.id}'
' -P pos -P lemma -P simple_pos'
' -S ent:0+type -S s:0'
' -S text:0+address+author+booktitle+chapter+editor+institution+journal+pages+publisher+publishing_year+school+title' # noqa
' -xsB -9'
' && '
f'cwb-make -V NOPAQUE_{corpus.id}'
)
''' ## Constraints ## '''
constraints = ['node.role==worker']
''' ## Image ## '''
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cqpserver:r1674' # noqa
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cwb:r1702'
''' ## Labels ## '''
labels = {
'origin': current_app.config['SERVER_NAME'],
'type': 'build-corpus',
'type': 'corpus.build',
'corpus_id': str(corpus.id)
}
''' ## Mounts ## '''
''' ### Corpus file mount ### '''
corpus_file_source = os.path.join(corpus.path, 'merged', 'corpus.vrt')
corpus_file_target = '/root/files/corpus.vrt'
corpus_file_mount = f'{corpus_file_source}:{corpus_file_target}:ro'
''' ### Corpus data mount ### '''
corpus_data_source = os.path.join(corpus.path, 'data')
corpus_data_target = '/corpora/data'
corpus_data_mount = f'{corpus_data_source}:{corpus_data_target}:rw'
# Make sure that there is no data in the corpus data directory
shutil.rmtree(corpus_data_source, ignore_errors=True)
os.mkdir(corpus_data_source)
''' ### Corpus registry mount ### '''
corpus_registry_source = os.path.join(corpus.path, 'registry')
corpus_registry_target = '/usr/local/share/cwb/registry'
corpus_registry_mount = f'{corpus_registry_source}:{corpus_registry_target}:rw' # noqa
# Make sure that there is no data in the corpus registry directory
shutil.rmtree(corpus_registry_source, ignore_errors=True)
os.mkdir(corpus_registry_source)
mounts = [corpus_file_mount, corpus_data_mount, corpus_registry_mount]
mounts = []
''' ### Data mount ### '''
data_mount_source = os.path.join(corpus.path, 'cwb', 'data')
data_mount_target = '/corpora/data'
data_mount = f'{data_mount_source}:{data_mount_target}:rw'
# Make sure that there is no data in the data directory
shutil.rmtree(data_mount_source, ignore_errors=True)
os.makedirs(data_mount_source)
mounts.append(data_mount)
''' ### File mount ### '''
file_mount_source = os.path.join(corpus.path, 'cwb', 'corpus.vrt')
file_mount_target = '/root/files/corpus.vrt'
file_mount = f'{file_mount_source}:{file_mount_target}:ro'
mounts.append(file_mount)
''' ### Registry mount ### '''
registry_mount_source = os.path.join(corpus.path, 'cwb', 'registry')
registry_mount_target = '/usr/local/share/cwb/registry'
registry_mount = f'{registry_mount_source}:{registry_mount_target}:rw'
# Make sure that there is no data in the registry directory
shutil.rmtree(registry_mount_source, ignore_errors=True)
os.makedirs(registry_mount_source)
mounts.append(registry_mount)
''' ## Name ## '''
name = f'build-corpus_{corpus.id}'
''' ## Restart policy ## '''
@@ -74,7 +92,7 @@ class CheckCorporaMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Create service "{name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
return
corpus.status = 'queued'
@@ -86,14 +104,14 @@ class CheckCorporaMixin:
except docker.errors.NotFound as e:
current_app.logger.error(
f'Get service "{service_name}" failed '
+ f'due to "docker.errors.NotFound": {e}'
f'due to "docker.errors.NotFound": {e}'
)
corpus.status = 'failed'
return
except docker.errors.APIError as e:
current_app.logger.error(
f'Get service "{service_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
service_tasks = service.tasks()
if not service_tasks:
@@ -108,36 +126,47 @@ class CheckCorporaMixin:
corpus.status = 'failed'
else:
return
try:
service.remove()
except docker.errors.APIError as e:
current_app.logger.error(
f'Remove service "{service_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
)
# try:
# service.remove()
# except docker.errors.APIError as e:
# current_app.logger.error(
# f'Remove service "{service_name}" failed '
# f'due to "docker.errors.APIError": {e}'
# )

def create_cqpserver_container(self, corpus):
''' # Docker container settings # '''
''' ## Command ## '''
command = 'cqpserver'
command = []
command.append(
'echo "host *;" > cqpserver.init'
' && '
'echo "user anonymous \\"\\";" >> cqpserver.init'
' && '
'cqpserver -I cqpserver.init'
)
''' ## Detach ## '''
detach = True
''' ## Entrypoint ## '''
entrypoint = ['bash', '-c']
''' ## Image ## '''
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cqpserver:r1674' # noqa
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cwb:r1702'
''' ## Name ## '''
name = f'cqpserver_{corpus.id}'
''' ## Network ## '''
network = 'nopaque_default'
''' ## Volumes ## '''
volumes = []
''' ### Corpus data volume ### '''
corpus_data_source = os.path.join(corpus.path, 'data')
corpus_data_target = '/corpora/data'
corpus_data_volume = f'{corpus_data_source}:{corpus_data_target}:rw'
data_volume_source = os.path.join(corpus.path, 'cwb', 'data')
data_volume_target = '/corpora/data'
data_volume = f'{data_volume_source}:{data_volume_target}:rw'
volumes.append(data_volume)
''' ### Corpus registry volume ### '''
corpus_registry_source = os.path.join(corpus.path, 'registry')
corpus_registry_target = '/usr/local/share/cwb/registry'
corpus_registry_volume = f'{corpus_registry_source}:{corpus_registry_target}:rw' # noqa
volumes = [corpus_data_volume, corpus_registry_volume]
registry_volume_source = os.path.join(corpus.path, 'cwb', 'registry')
registry_volume_target = '/usr/local/share/cwb/registry'
registry_volume = f'{registry_volume_source}:{registry_volume_target}:rw' # noqa
volumes.append(registry_volume)
# Check if a cqpserver container already exists. If this is the case,
# remove it and create a new one
try:
@@ -147,7 +176,7 @@ class CheckCorporaMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Get container "{name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
return
else:
@@ -156,7 +185,7 @@ class CheckCorporaMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Remove container "{name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
return
try:
@@ -164,6 +193,7 @@ class CheckCorporaMixin:
image,
command=command,
detach=detach,
entrypoint=entrypoint,
volumes=volumes,
name=name,
network=network
@@ -171,14 +201,14 @@ class CheckCorporaMixin:
except docker.errors.ImageNotFound as e:
current_app.logger.error(
f'Run container "{name}" failed '
+ f'due to "docker.errors.ImageNotFound" error: {e}'
f'due to "docker.errors.ImageNotFound" error: {e}'
)
corpus.status = 'failed'
return
except docker.errors.APIError as e:
current_app.logger.error(
f'Run container "{name}" failed '
+ f'due to "docker.errors.APIError" error: {e}'
f'due to "docker.errors.APIError" error: {e}'
)
return
corpus.status = 'analysing'
@@ -190,14 +220,14 @@ class CheckCorporaMixin:
except docker.errors.NotFound as e:
current_app.logger.error(
f'Get container "{container_name}" failed '
+ f'due to "docker.errors.NotFound": {e}'
f'due to "docker.errors.NotFound": {e}'
)
corpus.num_analysis_sessions = 0
corpus.status = 'prepared'
except docker.errors.APIError as e:
current_app.logger.error(
f'Get container "{container_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)

def remove_cqpserver_container(self, corpus):
@@ -210,7 +240,7 @@ class CheckCorporaMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Get container "{container_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
return
try:
@@ -218,5 +248,5 @@ class CheckCorporaMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Remove container "{container_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
@@ -2,7 +2,7 @@ from datetime import datetime
from flask import current_app
from werkzeug.utils import secure_filename
from .. import db
from ..models import Job, JobResult
from ..models import Job, JobResult, TesseractOCRModel
import docker
import json
import os
@@ -23,27 +23,34 @@ class CheckJobsMixin:
''' # Docker service settings # '''
''' ## Service specific settings ## '''
if job.service == 'file-setup':
mem_mb = 2048
mem_mb = 512
n_cores = 2
executable = 'file-setup'
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}file-setup:{job.service_version}' # noqa
elif job.service == 'ocr':
mem_mb = 4096
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}file-setup:v{job.service_version}' # noqa
elif job.service == 'tesseract-ocr':
mem_mb = 2048
n_cores = 4
executable = 'ocr'
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}ocr:{job.service_version}' # noqa
elif job.service == 'nlp':
mem_mb = 2048
n_cores = 2
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}ocr:v{job.service_version}' # noqa
elif job.service == 'spacy-nlp':
mem_mb = 1024
n_cores = 1
executable = 'nlp'
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}nlp:{job.service_version}' # noqa
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}nlp:v{job.service_version}' # noqa
''' ## Command ## '''
command = f'{executable} -i /input -o /output'
command += ' --log-dir /input'
command += ' --log-dir /logs'
command += f' --mem-mb {mem_mb}'
command += f' --n-cores {n_cores}'
command += f' --zip [{job.service}]_{secure_filename(job.title)}'
command += ' ' + ' '.join(json.loads(job.service_args))
service_args = json.loads(job.service_args)
if job.service == 'spacy-nlp':
command += f' -m {service_args["model"]}'
if 'encoding_detection' in service_args and service_args['encoding_detection']: # noqa
command += ' --check-encoding'
elif job.service == 'tesseract-ocr':
command += f' -m {service_args["model"]}'
if 'binarization' in service_args and service_args['binarization']:
command += ' --binarize'
''' ## Constraints ## '''
constraints = ['node.role==worker']
''' ## Labels ## '''
@@ -53,20 +60,42 @@ class CheckJobsMixin:
'job_id': str(job.id)
}
''' ## Mounts ## '''
''' ### Input mount ### '''
input_mount_source = job.path
input_mount_target = '/input'
mounts = []
''' ### Input mount(s) ### '''
input_mount_target_base = '/input'
if job.service == 'file-setup':
input_mount_target += f'/{secure_filename(job.title)}'
input_mount = f'{input_mount_source}:{input_mount_target}:rw'
input_mount_target_base += f'/{secure_filename(job.title)}'
for job_input in job.inputs:
input_mount_source = job_input.path
input_mount_target = f'/{input_mount_target_base}/{job_input.filename}' # noqa
input_mount = f'{input_mount_source}:{input_mount_target}:ro'
mounts.append(input_mount)
if job.service == 'tesseract-ocr':
service_args = json.loads(job.service_args)
model = TesseractOCRModel.query.get(service_args['model'])
if model is None:
job.status = 'failed'
return
models_mount_source = model.path
models_mount_target = f'/usr/local/share/tessdata/{model.filename}'
models_mount = f'{models_mount_source}:{models_mount_target}:ro'
mounts.append(models_mount)
''' ### Output mount ### '''
output_mount_source = os.path.join(job.path, 'output')
output_mount_source = os.path.join(job.path, 'results')
output_mount_target = '/output'
output_mount = f'{output_mount_source}:{output_mount_target}:rw'
# Make sure that there is no data in the output directory
shutil.rmtree(output_mount_source, ignore_errors=True)
os.makedirs(output_mount_source)
mounts = [input_mount, output_mount]
mounts.append(output_mount)
''' ### Pipeline data mount ### '''
pyflow_data_mount_source = os.path.join(job.path, 'pipeline_data')
pyflow_data_mount_target = '/logs/pyflow.data'
pyflow_data_mount = f'{pyflow_data_mount_source}:{pyflow_data_mount_target}:rw' # noqa
# Make sure that there is no data in the output directory
shutil.rmtree(pyflow_data_mount_source, ignore_errors=True)
os.makedirs(pyflow_data_mount_source)
mounts.append(pyflow_data_mount)
''' ## Name ## '''
name = f'job_{job.id}'
''' ## Resources ## '''
@@ -90,7 +119,7 @@ class CheckJobsMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Create service "{name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
return
job.status = 'queued'
@@ -102,14 +131,14 @@ class CheckJobsMixin:
except docker.errors.NotFound as e:
current_app.logger.error(
f'Get service "{service_name}" failed '
+ f'due to "docker.errors.NotFound": {e}'
f'due to "docker.errors.NotFound": {e}'
)
job.status = 'failed'
return
except docker.errors.APIError as e:
current_app.logger.error(
f'Get service "{service_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
return
service_tasks = service.tasks()
@@ -121,13 +150,25 @@ class CheckJobsMixin:
return
elif job.status == 'running' and task_state == 'complete':
job.status = 'complete'
results_dir = os.path.join(job.path, 'output')
result_files = [x for x in os.listdir(results_dir) if x.endswith('.zip')] # noqa
for result_file in result_files:
job_result = JobResult(filename=result_file, job=job)
results_dir = os.path.join(job.path, 'results')
with open(os.path.join(results_dir, 'outputs.json')) as f:
outputs = json.load(f)
for output in outputs:
filename = os.path.basename(output['file'])
job_result = JobResult(
filename=filename,
job=job,
mimetype=output['mimetype']
)
if 'description' in output:
job_result.description = output['description']
db.session.add(job_result)
db.session.flush()
db.session.flush(objects=[job_result])
db.session.refresh(job_result)
os.rename(
os.path.join(results_dir, output['file']),
job_result.path
)
elif job.status == 'running' and task_state == 'failed':
job.status = 'failed'
else:
@@ -138,7 +179,7 @@ class CheckJobsMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Remove service "{service_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)

def remove_job_service(self, job):
@@ -151,7 +192,7 @@ class CheckJobsMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Get service "{service_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
return
try:
@@ -159,7 +200,7 @@ class CheckJobsMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Update service "{service_name}" failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)
return
try:
@@ -167,5 +208,5 @@ class CheckJobsMixin:
except docker.errors.APIError as e:
current_app.logger.error(
f'Remove "{service_name}" service failed '
+ f'due to "docker.errors.APIError": {e}'
f'due to "docker.errors.APIError": {e}'
)