From 3923f6dd32e06eec078eeb05bb4af010a172d44a Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Wed, 4 Nov 2020 13:30:06 +0100 Subject: [PATCH 01/52] Upgrade to python 3.9.0 --- daemon/Dockerfile | 2 +- web/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/daemon/Dockerfile b/daemon/Dockerfile index c5c7191c..be9a5d74 100644 --- a/daemon/Dockerfile +++ b/daemon/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.6.12-slim-buster +FROM python:3.9.0-slim-buster LABEL authors="Patrick Jentsch , Stephan Porada " diff --git a/web/Dockerfile b/web/Dockerfile index 216964cc..1b41b48a 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.6.12-slim-buster +FROM python:3.9.0-slim-buster LABEL authors="Patrick Jentsch , Stephan Porada " From 9c9790be89201d6d4cc54164e09ec5c214454325 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Wed, 4 Nov 2020 13:30:48 +0100 Subject: [PATCH 02/52] codestyle in nopaque base template --- web/app/templates/nopaque.html.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/app/templates/nopaque.html.j2 b/web/app/templates/nopaque.html.j2 index 82ea50da..81eadb6f 100644 --- a/web/app/templates/nopaque.html.j2 +++ b/web/app/templates/nopaque.html.j2 @@ -13,7 +13,7 @@ {{ super() }} {% endblock metas %} -{% block title %}{{title}}{% endblock title %} +{% block title %}{{ title }}{% endblock title %} {% block styles %} {{ super() }} From ab21768311e387af2c35ad1e69c7c5541dcff5aa Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Wed, 4 Nov 2020 14:54:27 +0100 Subject: [PATCH 03/52] smaller rework while getting an overview --- web/app/admin/views.py | 10 ++-------- web/app/models.py | 22 +++++++++++----------- web/app/static/js/nopaque.lists.js | 10 ++++------ web/app/templates/admin/users.html.j2 | 8 +++----- 4 files changed, 20 insertions(+), 30 deletions(-) diff --git a/web/app/admin/views.py b/web/app/admin/views.py index d06c856a..c3f4e875 100644 --- a/web/app/admin/views.py +++ b/web/app/admin/views.py @@ -1,5 +1,5 @@ from flask import flash, redirect, render_template, url_for -from flask_login import current_user, login_required +from flask_login import login_required from . import admin from .forms import EditGeneralSettingsAdminForm from .. 
import db @@ -12,13 +12,7 @@ from ..settings import tasks as settings_tasks @login_required @admin_required def users(): - users = User.query.all() - users = [dict(username=u.username, - email=u.email, - role_id=u.role_id, - confirmed=u.confirmed, - id=u.id) - for u in users] + users = [user.to_dict() for user in User.query.all()] return render_template('admin/users.html.j2', title='Users', users=users) diff --git a/web/app/models.py b/web/app/models.py index 00c83245..4f72960a 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -145,17 +145,18 @@ class User(UserMixin, db.Model): 'email': self.email, 'last_seen': self.last_seen.timestamp(), 'member_since': self.member_since.timestamp(), - 'username': self.username, 'settings': {'dark_mode': self.setting_dark_mode, 'job_status_mail_notifications': self.setting_job_status_mail_notifications, 'job_status_site_notifications': self.setting_job_status_site_notifications}, + 'username': self.username, 'corpora': {corpus.id: corpus.to_dict() for corpus in self.corpora}, 'jobs': {job.id: job.to_dict() for job in self.jobs}, 'query_results': {query_result.id: query_result.to_dict() - for query_result in self.query_results}} + for query_result in self.query_results}, + 'role': self.role.to_dict()} def __repr__(self): ''' @@ -334,6 +335,7 @@ class Job(db.Model): end_date = db.Column(db.DateTime()) mem_mb = db.Column(db.Integer) n_cores = db.Column(db.Integer) + # This is used for zip creation secure_filename = db.Column(db.String(32)) service = db.Column(db.String(64)) ''' @@ -413,16 +415,14 @@ class Job(db.Model): 'description': self.description, 'end_date': (self.end_date.timestamp() if self.end_date else None), - 'inputs': {input.id: input.to_dict() for input in self.inputs}, - 'mem_mb': self.mem_mb, - 'n_cores': self.n_cores, - 'results': {result.id: result.to_dict() - for result in self.results}, - 'service': self.service, - 'service_args': self.service_args, - 'service_version': self.service_version, + 'service': {'args': self.service_args, + 'name': self.service, + 'version': self.service_version}, 'status': self.status, - 'title': self.title} + 'title': self.title, + 'inputs': {input.id: input.to_dict() for input in self.inputs}, + 'results': {result.id: result.to_dict() + for result in self.results}} class NotificationData(db.Model): diff --git a/web/app/static/js/nopaque.lists.js b/web/app/static/js/nopaque.lists.js index 28ae8679..8d7b6159 100644 --- a/web/app/static/js/nopaque.lists.js +++ b/web/app/static/js/nopaque.lists.js @@ -131,7 +131,7 @@ RessourceList.dataMappers = { email: user.email, id: user.id, link: `users/${user.id}`, - role_id: user.role_id, + role: user.role.name, username: user.username, username2: user.username, "delete-link": `/admin/users/${user.id}/delete`, @@ -388,11 +388,10 @@ RessourceList.options = { }, User: { item: ` + - - - + @@ -43,6 +41,6 @@ {% endblock scripts %} From 9e2cc6486c64ce61fd672c8e148bc206c9da9f3a Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Fri, 6 Nov 2020 10:28:05 +0100 Subject: [PATCH 04/52] Add NOPAQUE_HOST and NOPAQUE_PORT variables --- .env.tpl | 6 ++++++ web/nopaque.py | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.env.tpl b/.env.tpl index 0d01b60d..2c13eb3e 100644 --- a/.env.tpl +++ b/.env.tpl @@ -110,6 +110,12 @@ NOPAQUE_ADMIN_EMAIL_ADRESS= # DEFAULT: localhost # NOPAQUE_DOMAIN= +# DEFAULT: 0.0.0.0 +# NOPAQUE_HOST= + +# DEFAULT: 5000 +# NOPAQUE_PORT= + # CHOOSE ONE: http, https # DEFAULT: http # NOPAQUE_PROTOCOL= diff --git a/web/nopaque.py 
b/web/nopaque.py index 8884e833..d636cfd5 100644 --- a/web/nopaque.py +++ b/web/nopaque.py @@ -60,4 +60,6 @@ def test(): if __name__ == '__main__': - socketio.run(app, host='0.0.0.0') + host = os.environ.get('NOPAQUE_HOST', '0.0.0.0') + port = int(os.environ.get('NOPAQUE_PORT', '5000')) + socketio.run(app, host=host, port=port) From 0f30e518afbe0a212063a0958ea2ec13bdae16cc Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Fri, 6 Nov 2020 15:07:58 +0100 Subject: [PATCH 05/52] First steps --- web/app/daemon.py | 43 +++++ web/app/daemon/check_corpora.py | 139 +++++++++++++++++ web/app/daemon/check_jobs.py | 147 ++++++++++++++++++ web/app/daemon/libnotify/__init__.py | 0 web/app/daemon/libnotify/notification.py | 28 ++++ web/app/daemon/libnotify/service.py | 16 ++ .../libnotify/templates/notification.html | 15 ++ .../libnotify/templates/notification.txt | 10 ++ web/app/daemon/notify.py | 111 +++++++++++++ web/requirements.txt | 1 + 10 files changed, 510 insertions(+) create mode 100644 web/app/daemon.py create mode 100644 web/app/daemon/check_corpora.py create mode 100644 web/app/daemon/check_jobs.py create mode 100644 web/app/daemon/libnotify/__init__.py create mode 100644 web/app/daemon/libnotify/notification.py create mode 100644 web/app/daemon/libnotify/service.py create mode 100644 web/app/daemon/libnotify/templates/notification.html create mode 100644 web/app/daemon/libnotify/templates/notification.txt create mode 100644 web/app/daemon/notify.py diff --git a/web/app/daemon.py b/web/app/daemon.py new file mode 100644 index 00000000..9e9d27c9 --- /dev/null +++ b/web/app/daemon.py @@ -0,0 +1,43 @@ +from app import create_app +from time import sleep +from ..decorators import background +import docker + + +app = create_app() +docker_client = docker.from_env() + +app.app_context().push() +from . import check_corpora, check_jobs, notify # noqa + + +def run(): + check_corpora_thread = check_corpora() + check_jobs_thread = check_jobs() + notify_thread = notify() + + while True: + if not check_corpora_thread.is_alive(): + check_corpora_thread = check_corpora() + if not check_jobs_thread.is_alive(): + check_jobs_thread = check_jobs() + if not notify_thread.is_alive(): + notify_thread = notify() + sleep(3) + + +@background +def check_corpora(): + corpora = Corpus.query.all() + for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): + __create_build_corpus_service(corpus) + for corpus in filter(lambda corpus: (corpus.status == 'queued' + or corpus.status == 'running'), + corpora): + __checkout_build_corpus_service(corpus) + for corpus in filter(lambda corpus: corpus.status == 'start analysis', + corpora): + __create_cqpserver_container(corpus) + for corpus in filter(lambda corpus: corpus.status == 'stop analysis', + corpora): + __remove_cqpserver_container(corpus) diff --git a/web/app/daemon/check_corpora.py b/web/app/daemon/check_corpora.py new file mode 100644 index 00000000..1150ea6c --- /dev/null +++ b/web/app/daemon/check_corpora.py @@ -0,0 +1,139 @@ +from . import docker_client +from .. 
import db +from ..decorators import background +from ..models import Corpus +import docker +import logging +import os +import shutil + + +@background +def check_corpora(): + corpora = Corpus.query.all() + for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): + __create_build_corpus_service(corpus) + for corpus in filter(lambda corpus: (corpus.status == 'queued' + or corpus.status == 'running'), + corpora): + __checkout_build_corpus_service(corpus) + for corpus in filter(lambda corpus: corpus.status == 'start analysis', + corpora): + __create_cqpserver_container(corpus) + for corpus in filter(lambda corpus: corpus.status == 'stop analysis', + corpora): + __remove_cqpserver_container(corpus) + db.session.commit() + Session.remove() + + +def __create_build_corpus_service(corpus): + corpus_dir = os.path.join(config.DATA_DIR, + str(corpus.user_id), + 'corpora', + str(corpus.id)) + corpus_data_dir = os.path.join(corpus_dir, 'data') + corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt') + corpus_registry_dir = os.path.join(corpus_dir, 'registry') + if os.path.exists(corpus_data_dir): + shutil.rmtree(corpus_data_dir) + if os.path.exists(corpus_registry_dir): + shutil.rmtree(corpus_registry_dir) + os.mkdir(corpus_data_dir) + os.mkdir(corpus_registry_dir) + service_args = {'command': 'docker-entrypoint.sh build-corpus', + 'constraints': ['node.role==worker'], + 'labels': {'origin': 'nopaque', + 'type': 'corpus.prepare', + 'corpus_id': str(corpus.id)}, + 'mounts': [corpus_file + ':/root/files/corpus.vrt:ro', + corpus_data_dir + ':/corpora/data:rw', + corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], + 'name': 'build-corpus_{}'.format(corpus.id), + 'restart_policy': docker.types.RestartPolicy()} + service_image = \ + 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' + try: + service = docker_client.services.get(service_args['name']) + except docker.errors.NotFound: + pass + except docker.errors.DockerException: + return + else: + service.remove() + try: + docker_client.services.create(service_image, **service_args) + except docker.errors.DockerException: + corpus.status = 'failed' + else: + corpus.status = 'queued' + + +def __checkout_build_corpus_service(corpus): + service_name = 'build-corpus_{}'.format(corpus.id) + try: + service = docker_client.services.get(service_name) + except docker.errors.NotFound: + logging.error('__checkout_build_corpus_service({}):'.format(corpus.id) + + ' The service does not exist.' 
+ + ' (stauts: {} -> failed)'.format(corpus.status)) + corpus.status = 'failed' + return + except docker.errors.DockerException: + return + service_tasks = service.tasks() + if not service_tasks: + return + task_state = service_tasks[0].get('Status').get('State') + if corpus.status == 'queued' and task_state != 'pending': + corpus.status = 'running' + elif corpus.status == 'running' and task_state == 'complete': + service.remove() + corpus.status = 'prepared' + elif corpus.status == 'running' and task_state == 'failed': + service.remove() + corpus.status = task_state + + +def __create_cqpserver_container(corpus): + corpus_dir = os.path.join(config.DATA_DIR, + str(corpus.user_id), + 'corpora', + str(corpus.id)) + corpus_data_dir = os.path.join(corpus_dir, 'data') + corpus_registry_dir = os.path.join(corpus_dir, 'registry') + container_args = {'command': 'cqpserver', + 'detach': True, + 'volumes': [corpus_data_dir + ':/corpora/data:rw', + corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], + 'name': 'cqpserver_{}'.format(corpus.id), + 'network': 'nopaque_default'} + container_image = \ + 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' + try: + container = docker_client.containers.get(container_args['name']) + except docker.errors.NotFound: + pass + except docker.errors.DockerException: + return + else: + container.remove(force=True) + try: + docker_client.containers.run(container_image, **container_args) + except docker.errors.DockerException: + return + else: + corpus.status = 'analysing' + + +def __remove_cqpserver_container(corpus): + container_name = 'cqpserver_{}'.format(corpus.id) + try: + container = docker_client.containers.get(container_name) + except docker.errors.NotFound: + pass + except docker.errors.DockerException: + return + else: + container.remove(force=True) + corpus.status = 'prepared' diff --git a/web/app/daemon/check_jobs.py b/web/app/daemon/check_jobs.py new file mode 100644 index 00000000..f5530e1e --- /dev/null +++ b/web/app/daemon/check_jobs.py @@ -0,0 +1,147 @@ +from datetime import datetime +from .. import configuration as config +from .. 
import docker_client, Session +from ..decorators import background +from ..models import Job, JobResult, NotificationData, NotificationEmailData +import docker +import logging +import json +import os + + +@background +def check_jobs(): + session = Session() + jobs = session.query(Job).all() + for job in filter(lambda job: job.status == 'submitted', jobs): + __create_job_service(job) + for job in filter(lambda job: job.status == 'queued', jobs): + __checkout_job_service(job, session) + __add_notification_data(job, 'queued', session) + for job in filter(lambda job: job.status == 'running', jobs): + __checkout_job_service(job, session) + __add_notification_data(job, 'running', session) + for job in filter(lambda job: job.status == 'complete', jobs): + __add_notification_data(job, 'complete', session) + for job in filter(lambda job: job.status == 'failed', jobs): + __add_notification_data(job, 'failed', session) + for job in filter(lambda job: job.status == 'canceling', jobs): + __remove_job_service(job) + session.commit() + Session.remove() + + +def __add_notification_data(job, notified_on_status, session): + # checks if user wants any notifications at all + if (job.user.setting_job_status_mail_notifications == 'none'): + return + # checks if user wants only notification on completed jobs + elif (job.user.setting_job_status_mail_notifications == 'end' + and notified_on_status != 'complete'): + return + else: + # check if a job already has associated NotificationData + notification_exists = len(job.notification_data) + # create notification_data for current job if there is none + if (notification_exists == 0): + notification_data = NotificationData(job_id=job.id) + session.add(notification_data) + # If no commit job will have no NotificationData + session.commit() + if (job.notification_data[0].notified_on != notified_on_status): + notification_email_data = NotificationEmailData(job_id=job.id) + notification_email_data.notify_status = notified_on_status + notification_email_data.creation_date = datetime.utcnow() + job.notification_data[0].notified_on = notified_on_status + session.add(notification_email_data) + + +def __create_job_service(job): + job_dir = os.path.join(config.DATA_DIR, + str(job.user_id), + 'jobs', + str(job.id)) + cmd = '{} -i /files -o /files/output'.format(job.service) + if job.service == 'file-setup': + cmd += ' -f {}'.format(job.secure_filename) + cmd += ' --log-dir /files' + cmd += ' --zip [{}]_{}'.format(job.service, job.secure_filename) + cmd += ' ' + ' '.join(json.loads(job.service_args)) + service_args = {'command': cmd, + 'constraints': ['node.role==worker'], + 'labels': {'origin': 'nopaque', + 'type': 'service.{}'.format(job.service), + 'job_id': str(job.id)}, + 'mounts': [job_dir + ':/files:rw'], + 'name': 'job_{}'.format(job.id), + 'resources': docker.types.Resources( + cpu_reservation=job.n_cores * (10 ** 9), + mem_reservation=job.mem_mb * (10 ** 6)), + 'restart_policy': docker.types.RestartPolicy()} + service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/' + + job.service + ':' + job.service_version) + try: + service = docker_client.services.get(service_args['name']) + except docker.errors.NotFound: + pass + except docker.errors.DockerException: + return + else: + service.remove() + try: + docker_client.services.create(service_image, **service_args) + except docker.errors.DockerException: + job.status = 'failed' + else: + job.status = 'queued' + + +def __checkout_job_service(job, session): + service_name = 'job_{}'.format(job.id) + try: + service = 
docker_client.services.get(service_name) + except docker.errors.NotFound: + logging.error('__checkout_job_service({}): '.format(job.id) + + 'The service does not exist. ' + + '(status: {} -> failed)'.format(job.status)) + job.status = 'failed' + return + except docker.errors.DockerException: + return + service_tasks = service.tasks() + if not service_tasks: + return + task_state = service_tasks[0].get('Status').get('State') + if job.status == 'queued' and task_state != 'pending': + job.status = 'running' + elif (job.status == 'running' + and (task_state == 'complete' or task_state == 'failed')): + service.remove() + job.end_date = datetime.utcnow() + job.status = task_state + if task_state == 'complete': + results_dir = os.path.join(config.DATA_DIR, + str(job.user_id), + 'jobs', + str(job.id), + 'output') + results = filter(lambda x: x.endswith('.zip'), + os.listdir(results_dir)) + for result in results: + job_result = JobResult(dir=results_dir, + filename=result, + job_id=job.id) + session.add(job_result) + + +def __remove_job_service(job): + service_name = 'job_{}'.format(job.id) + try: + service = docker_client.services.get(service_name) + except docker.errors.NotFound: + job.status = 'canceled' + except docker.errors.DockerException: + return + else: + service.update(mounts=None) + service.remove() diff --git a/web/app/daemon/libnotify/__init__.py b/web/app/daemon/libnotify/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/web/app/daemon/libnotify/notification.py b/web/app/daemon/libnotify/notification.py new file mode 100644 index 00000000..488471c3 --- /dev/null +++ b/web/app/daemon/libnotify/notification.py @@ -0,0 +1,28 @@ +from email.message import EmailMessage + + +class Notification(EmailMessage): + """docstring for Email.""" + + def set_notification_content(self, + subject_template, + subject_template_values_dict, + body_txt_template_path, + body_html_template_path, + body_template_values_dict): + # Create subject with subject_template_values_dict + self['subject'] = subject_template.format( + **subject_template_values_dict) + # Open template files and insert values from body_template_values_dict + with open(body_txt_template_path) as nfile: + self.body = nfile.read().format(**body_template_values_dict) + with open(body_html_template_path) as nfile: + self.html = nfile.read().format(**body_template_values_dict) + # Set txt of email + self.set_content(self.body) + # Set html alternative + self.add_alternative(self.html, subtype='html') + + def set_addresses(self, sender, recipient): + self['From'] = sender + self['to'] = recipient diff --git a/web/app/daemon/libnotify/service.py b/web/app/daemon/libnotify/service.py new file mode 100644 index 00000000..633fb386 --- /dev/null +++ b/web/app/daemon/libnotify/service.py @@ -0,0 +1,16 @@ +class NotificationService: + """This is a nopaque notifcation service object.""" + + def __init__(self, smtp): + # Bool to show if the mail server stoped sending mails due to exceeding + # its sending limit + self.mail_limit_exceeded = False + # Holds due to an error unsent email notifications + self.not_sent = {} + self.smtp = smtp + + def send(self, email): + self.smtp.send_message(email) + + def quit(self): + self.smtp.quit() diff --git a/web/app/daemon/libnotify/templates/notification.html b/web/app/daemon/libnotify/templates/notification.html new file mode 100644 index 00000000..e2edfe75 --- /dev/null +++ b/web/app/daemon/libnotify/templates/notification.html @@ -0,0 +1,15 @@ + + +

+Dear {username},
+
+The status of your Job/Corpus({id}) with the title "{title}" has changed!
+It is now {status}!
+Time of this status update was: {time} UTC
+
+You can access your Job/Corpus here: {url}
+
+Kind regards!
+Your nopaque team
+ + diff --git a/web/app/daemon/libnotify/templates/notification.txt b/web/app/daemon/libnotify/templates/notification.txt new file mode 100644 index 00000000..0e221c54 --- /dev/null +++ b/web/app/daemon/libnotify/templates/notification.txt @@ -0,0 +1,10 @@ +Dear {username}, + +The status of your Job/Corpus({id}) with the title "{title}" has changed! +It is now {status}! +Time of this status update was: {time} UTC + +You can access your Job/Corpus here: {url} + +Kind regards! +Your nopaque team \ No newline at end of file diff --git a/web/app/daemon/notify.py b/web/app/daemon/notify.py new file mode 100644 index 00000000..5d3d23f3 --- /dev/null +++ b/web/app/daemon/notify.py @@ -0,0 +1,111 @@ +from sqlalchemy import asc +from .libnotify.notification import Notification +from .libnotify.service import NotificationService +from .. import configuration as config +from .. import Session +from ..decorators import background +from ..models import NotificationEmailData +import logging +import os +import smtplib + + +ROOT_DIR = os.path.abspath(os.path.dirname(__file__)) + + +@background +def notify(): + session = Session() + if config.SMTP_USE_SSL: + smtp = smtplib.SMTP_SSL(host=config.SMTP_SERVER, port=config.SMTP_PORT) + else: + smtp = smtplib.SMTP(host=config.SMTP_SERVER, port=config.SMTP_PORT) + if config.SMTP_USE_TLS: + smtp.starttls() + try: + smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD) + except smtplib.SMTPHeloError: + logging.warning('The server didn’t reply properly to the HELO ' + 'greeting.') + return + except smtplib.SMTPAuthenticationError as e: + logging.warning('The server didn’t accept the username/password ' + 'combination.') + logging.warning(e) + return + except smtplib.SMTPNotSupportedError: + logging.warning('The AUTH command is not supported by the server.') + return + except smtplib.SMTPException: + logging.warning('No suitable authentication method was found.') + return + notification_service = NotificationService(smtp) + # create notifications (content, recipient etc.) 
+ notifications = __create_mail_notifications(notification_service, session) + # only login and send mails if there are any notifications + if (len(notifications) > 0): + # combine new and unsent notifications + notifications.update(notification_service.not_sent) + # send all notifications + __send_mail_notifications(notifications, notification_service) + # remove unsent notifications because they have been sent now + # but only if mail limit has not been exceeded + if (notification_service.mail_limit_exceeded is not True): + notification_service.not_sent = {} + smtp.quit() + Session.remove() + + +# Email notification functions +def __create_mail_notifications(notification_service, session): + notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all() # noqa + notifications = {} + for data in notification_email_data: + notification = Notification() + notification.set_addresses(config.SMTP_DEFAULT_SENDER, + data.job.user.email) + subject_template = ('[nopaque] Status update for your Job/Corpora: ' + '{title}!') + subject_template_values_dict = {'title': data.job.title} + url = '{}://{}/{}/{}'.format(config.PROTOCOL, + config.DOMAIN, + 'jobs', + data.job.id) + body_template_values_dict = {'username': data.job.user.username, + 'id': data.job.id, + 'title': data.job.title, + 'status': data.notify_status, + 'time': data.creation_date, + 'url': url} + txt_tmplt = os.path.join(ROOT_DIR, + 'libnotify/templates/notification.txt') + html_tmplt = os.path.join(ROOT_DIR, + 'libnotify/templates/notification.html') + notification.set_notification_content(subject_template, + subject_template_values_dict, + txt_tmplt, + html_tmplt, + body_template_values_dict) + notifications[data.job.id] = notification + # Using a dictionary for notifications avoids sending multiple mails + # if the status of a job changes in a few seconds. The user will not + # get swamped with mails for queued, running and complete if those + # happen in in a few seconds. Only the last update will be sent. + # This depends on the sleep time interval though. 
+ session.delete(data) + session.commit() + return notifications + + +def __send_mail_notifications(notifications, notification_service): + for key, notification in notifications.items(): + try: + notification_service.send(notification) + notification_service.mail_limit_exceeded = False + except Exception: + # Adds notifications to unsent if mail server exceded limit for + # consecutive mail sending + logging.warning('limit') + notification_service.not_sent[key] = notification + notification_service.mail_limit_exceeded = True + notification_service.not_sent.update(notifications) diff --git a/web/requirements.txt b/web/requirements.txt index 0d7f6e68..a47ebf8c 100644 --- a/web/requirements.txt +++ b/web/requirements.txt @@ -1,5 +1,6 @@ cqi dnspython==1.16.0 +docker eventlet Flask Flask-Login From cb9da5c7dd12eb5685b01bc9c7b34f292a2e981e Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Mon, 9 Nov 2020 16:14:19 +0100 Subject: [PATCH 06/52] Simplify daemon logic --- docker-compose.development.yml | 8 - docker-compose.yml | 42 ++--- web/.flaskenv | 1 + web/Dockerfile | 2 +- web/app/__init__.py | 1 + web/app/daemon.py | 43 ----- web/app/daemon/check_corpora.py | 139 ----------------- web/app/daemon/check_jobs.py | 147 ------------------ web/app/daemon/libnotify/__init__.py | 0 web/app/daemon/libnotify/notification.py | 28 ---- web/app/daemon/libnotify/service.py | 16 -- .../libnotify/templates/notification.html | 15 -- .../libnotify/templates/notification.txt | 10 -- web/app/daemon/notify.py | 111 ------------- web/app/tasks/__init__.py | 35 +++++ web/app/tasks/corpus_utils.py | 120 ++++++++++++++ web/app/tasks/job_utils.py | 101 ++++++++++++ .../tasks/email/notification.html.j2 | 9 ++ .../templates/tasks/email/notification.txt.j2 | 10 ++ web/boot.sh | 18 +-- web/nopaque.py | 7 + 21 files changed, 308 insertions(+), 555 deletions(-) create mode 100644 web/.flaskenv delete mode 100644 web/app/daemon.py delete mode 100644 web/app/daemon/check_corpora.py delete mode 100644 web/app/daemon/check_jobs.py delete mode 100644 web/app/daemon/libnotify/__init__.py delete mode 100644 web/app/daemon/libnotify/notification.py delete mode 100644 web/app/daemon/libnotify/service.py delete mode 100644 web/app/daemon/libnotify/templates/notification.html delete mode 100644 web/app/daemon/libnotify/templates/notification.txt delete mode 100644 web/app/daemon/notify.py create mode 100644 web/app/tasks/__init__.py create mode 100644 web/app/tasks/corpus_utils.py create mode 100644 web/app/tasks/job_utils.py create mode 100644 web/app/templates/tasks/email/notification.html.j2 create mode 100644 web/app/templates/tasks/email/notification.txt.j2 diff --git a/docker-compose.development.yml b/docker-compose.development.yml index 0a3248db..d1ac7719 100644 --- a/docker-compose.development.yml +++ b/docker-compose.development.yml @@ -13,11 +13,3 @@ services: - "./web/nopaque.py:/home/nopaque/nopaque.py" - "./web/requirements.txt:/home/nopaque/requirements.txt" - "./web/tests:/home/nopaque/tests" - nopaqued: - volumes: - # Mount code as volumes - - "./daemon/app:/home/nopaqued/app" - - "./daemon/boot.sh:/home/nopaqued/boot.sh" - - "./daemon/config.py:/home/nopaqued/config.py" - - "./daemon/nopaqued.py:/home/nopaqued/nopaqued.py" - - "./daemon/requirements.txt:/home/nopaqued/requirements.txt" diff --git a/docker-compose.yml b/docker-compose.yml index fe90fada..57f8b5bd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,9 +1,23 @@ version: "3.5" services: + db: + env_file: db.env + image: postgres:11 + 
restart: unless-stopped + volumes: + - "${HOST_DB_DIR:-./db}:/var/lib/postgresql/data" + + mq: + image: redis:6 + restart: unless-stopped + volumes: + - "${HOST_MQ_DIR:-./mq}:/data" + nopaque: build: args: + DOCKER_GID: ${HOST_DOCKER_GID} GID: ${HOST_GID} UID: ${HOST_UID} context: ./web @@ -16,31 +30,3 @@ services: volumes: - "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}" - "${HOST_NOPAQUE_LOG_FILE-./nopaque.log}:${NOPAQUE_LOG_FILE:-/home/nopaque/nopaque.log}" - nopaqued: - build: - args: - DOCKER_GID: ${HOST_DOCKER_GID} - GID: ${HOST_GID} - UID: ${HOST_UID} - context: ./daemon - depends_on: - - db - - nopaque - env_file: .env - image: nopaqued:development - restart: unless-stopped - volumes: - - "/var/run/docker.sock:/var/run/docker.sock" - - "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}" - - "${HOST_NOPAQUE_DAEMON_LOG_FILE-./nopaqued.log}:${NOPAQUE_DAEMON_LOG_FILE:-/home/nopaqued/nopaqued.log}" - db: - env_file: db.env - image: postgres:11 - restart: unless-stopped - volumes: - - "${HOST_DB_DIR:-./db}:/var/lib/postgresql/data" - mq: - image: redis:6 - restart: unless-stopped - volumes: - - "${HOST_MQ_DIR:-./mq}:/data" diff --git a/web/.flaskenv b/web/.flaskenv new file mode 100644 index 00000000..1fd672d3 --- /dev/null +++ b/web/.flaskenv @@ -0,0 +1 @@ +FLASK_APP=nopaque.py diff --git a/web/Dockerfile b/web/Dockerfile index 1b41b48a..4d8037e1 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -4,9 +4,9 @@ FROM python:3.9.0-slim-buster LABEL authors="Patrick Jentsch , Stephan Porada " +ARG DOCKER_GID ARG UID ARG GID -ENV FLASK_APP=nopaque.py ENV LANG=C.UTF-8 diff --git a/web/app/__init__.py b/web/app/__init__.py index a39a51a5..75108bd3 100644 --- a/web/app/__init__.py +++ b/web/app/__init__.py @@ -38,6 +38,7 @@ def create_app(config_name): from .main import main as main_blueprint from .services import services as services_blueprint from .settings import settings as settings_blueprint + app.register_blueprint(admin_blueprint, url_prefix='/admin') app.register_blueprint(auth_blueprint, url_prefix='/auth') app.register_blueprint(corpora_blueprint, url_prefix='/corpora') diff --git a/web/app/daemon.py b/web/app/daemon.py deleted file mode 100644 index 9e9d27c9..00000000 --- a/web/app/daemon.py +++ /dev/null @@ -1,43 +0,0 @@ -from app import create_app -from time import sleep -from ..decorators import background -import docker - - -app = create_app() -docker_client = docker.from_env() - -app.app_context().push() -from . 
import check_corpora, check_jobs, notify # noqa - - -def run(): - check_corpora_thread = check_corpora() - check_jobs_thread = check_jobs() - notify_thread = notify() - - while True: - if not check_corpora_thread.is_alive(): - check_corpora_thread = check_corpora() - if not check_jobs_thread.is_alive(): - check_jobs_thread = check_jobs() - if not notify_thread.is_alive(): - notify_thread = notify() - sleep(3) - - -@background -def check_corpora(): - corpora = Corpus.query.all() - for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): - __create_build_corpus_service(corpus) - for corpus in filter(lambda corpus: (corpus.status == 'queued' - or corpus.status == 'running'), - corpora): - __checkout_build_corpus_service(corpus) - for corpus in filter(lambda corpus: corpus.status == 'start analysis', - corpora): - __create_cqpserver_container(corpus) - for corpus in filter(lambda corpus: corpus.status == 'stop analysis', - corpora): - __remove_cqpserver_container(corpus) diff --git a/web/app/daemon/check_corpora.py b/web/app/daemon/check_corpora.py deleted file mode 100644 index 1150ea6c..00000000 --- a/web/app/daemon/check_corpora.py +++ /dev/null @@ -1,139 +0,0 @@ -from . import docker_client -from .. import db -from ..decorators import background -from ..models import Corpus -import docker -import logging -import os -import shutil - - -@background -def check_corpora(): - corpora = Corpus.query.all() - for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): - __create_build_corpus_service(corpus) - for corpus in filter(lambda corpus: (corpus.status == 'queued' - or corpus.status == 'running'), - corpora): - __checkout_build_corpus_service(corpus) - for corpus in filter(lambda corpus: corpus.status == 'start analysis', - corpora): - __create_cqpserver_container(corpus) - for corpus in filter(lambda corpus: corpus.status == 'stop analysis', - corpora): - __remove_cqpserver_container(corpus) - db.session.commit() - Session.remove() - - -def __create_build_corpus_service(corpus): - corpus_dir = os.path.join(config.DATA_DIR, - str(corpus.user_id), - 'corpora', - str(corpus.id)) - corpus_data_dir = os.path.join(corpus_dir, 'data') - corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt') - corpus_registry_dir = os.path.join(corpus_dir, 'registry') - if os.path.exists(corpus_data_dir): - shutil.rmtree(corpus_data_dir) - if os.path.exists(corpus_registry_dir): - shutil.rmtree(corpus_registry_dir) - os.mkdir(corpus_data_dir) - os.mkdir(corpus_registry_dir) - service_args = {'command': 'docker-entrypoint.sh build-corpus', - 'constraints': ['node.role==worker'], - 'labels': {'origin': 'nopaque', - 'type': 'corpus.prepare', - 'corpus_id': str(corpus.id)}, - 'mounts': [corpus_file + ':/root/files/corpus.vrt:ro', - corpus_data_dir + ':/corpora/data:rw', - corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], - 'name': 'build-corpus_{}'.format(corpus.id), - 'restart_policy': docker.types.RestartPolicy()} - service_image = \ - 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' - try: - service = docker_client.services.get(service_args['name']) - except docker.errors.NotFound: - pass - except docker.errors.DockerException: - return - else: - service.remove() - try: - docker_client.services.create(service_image, **service_args) - except docker.errors.DockerException: - corpus.status = 'failed' - else: - corpus.status = 'queued' - - -def __checkout_build_corpus_service(corpus): - service_name = 'build-corpus_{}'.format(corpus.id) - try: - service = 
docker_client.services.get(service_name) - except docker.errors.NotFound: - logging.error('__checkout_build_corpus_service({}):'.format(corpus.id) - + ' The service does not exist.' - + ' (stauts: {} -> failed)'.format(corpus.status)) - corpus.status = 'failed' - return - except docker.errors.DockerException: - return - service_tasks = service.tasks() - if not service_tasks: - return - task_state = service_tasks[0].get('Status').get('State') - if corpus.status == 'queued' and task_state != 'pending': - corpus.status = 'running' - elif corpus.status == 'running' and task_state == 'complete': - service.remove() - corpus.status = 'prepared' - elif corpus.status == 'running' and task_state == 'failed': - service.remove() - corpus.status = task_state - - -def __create_cqpserver_container(corpus): - corpus_dir = os.path.join(config.DATA_DIR, - str(corpus.user_id), - 'corpora', - str(corpus.id)) - corpus_data_dir = os.path.join(corpus_dir, 'data') - corpus_registry_dir = os.path.join(corpus_dir, 'registry') - container_args = {'command': 'cqpserver', - 'detach': True, - 'volumes': [corpus_data_dir + ':/corpora/data:rw', - corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], - 'name': 'cqpserver_{}'.format(corpus.id), - 'network': 'nopaque_default'} - container_image = \ - 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' - try: - container = docker_client.containers.get(container_args['name']) - except docker.errors.NotFound: - pass - except docker.errors.DockerException: - return - else: - container.remove(force=True) - try: - docker_client.containers.run(container_image, **container_args) - except docker.errors.DockerException: - return - else: - corpus.status = 'analysing' - - -def __remove_cqpserver_container(corpus): - container_name = 'cqpserver_{}'.format(corpus.id) - try: - container = docker_client.containers.get(container_name) - except docker.errors.NotFound: - pass - except docker.errors.DockerException: - return - else: - container.remove(force=True) - corpus.status = 'prepared' diff --git a/web/app/daemon/check_jobs.py b/web/app/daemon/check_jobs.py deleted file mode 100644 index f5530e1e..00000000 --- a/web/app/daemon/check_jobs.py +++ /dev/null @@ -1,147 +0,0 @@ -from datetime import datetime -from .. import configuration as config -from .. 
import docker_client, Session -from ..decorators import background -from ..models import Job, JobResult, NotificationData, NotificationEmailData -import docker -import logging -import json -import os - - -@background -def check_jobs(): - session = Session() - jobs = session.query(Job).all() - for job in filter(lambda job: job.status == 'submitted', jobs): - __create_job_service(job) - for job in filter(lambda job: job.status == 'queued', jobs): - __checkout_job_service(job, session) - __add_notification_data(job, 'queued', session) - for job in filter(lambda job: job.status == 'running', jobs): - __checkout_job_service(job, session) - __add_notification_data(job, 'running', session) - for job in filter(lambda job: job.status == 'complete', jobs): - __add_notification_data(job, 'complete', session) - for job in filter(lambda job: job.status == 'failed', jobs): - __add_notification_data(job, 'failed', session) - for job in filter(lambda job: job.status == 'canceling', jobs): - __remove_job_service(job) - session.commit() - Session.remove() - - -def __add_notification_data(job, notified_on_status, session): - # checks if user wants any notifications at all - if (job.user.setting_job_status_mail_notifications == 'none'): - return - # checks if user wants only notification on completed jobs - elif (job.user.setting_job_status_mail_notifications == 'end' - and notified_on_status != 'complete'): - return - else: - # check if a job already has associated NotificationData - notification_exists = len(job.notification_data) - # create notification_data for current job if there is none - if (notification_exists == 0): - notification_data = NotificationData(job_id=job.id) - session.add(notification_data) - # If no commit job will have no NotificationData - session.commit() - if (job.notification_data[0].notified_on != notified_on_status): - notification_email_data = NotificationEmailData(job_id=job.id) - notification_email_data.notify_status = notified_on_status - notification_email_data.creation_date = datetime.utcnow() - job.notification_data[0].notified_on = notified_on_status - session.add(notification_email_data) - - -def __create_job_service(job): - job_dir = os.path.join(config.DATA_DIR, - str(job.user_id), - 'jobs', - str(job.id)) - cmd = '{} -i /files -o /files/output'.format(job.service) - if job.service == 'file-setup': - cmd += ' -f {}'.format(job.secure_filename) - cmd += ' --log-dir /files' - cmd += ' --zip [{}]_{}'.format(job.service, job.secure_filename) - cmd += ' ' + ' '.join(json.loads(job.service_args)) - service_args = {'command': cmd, - 'constraints': ['node.role==worker'], - 'labels': {'origin': 'nopaque', - 'type': 'service.{}'.format(job.service), - 'job_id': str(job.id)}, - 'mounts': [job_dir + ':/files:rw'], - 'name': 'job_{}'.format(job.id), - 'resources': docker.types.Resources( - cpu_reservation=job.n_cores * (10 ** 9), - mem_reservation=job.mem_mb * (10 ** 6)), - 'restart_policy': docker.types.RestartPolicy()} - service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/' - + job.service + ':' + job.service_version) - try: - service = docker_client.services.get(service_args['name']) - except docker.errors.NotFound: - pass - except docker.errors.DockerException: - return - else: - service.remove() - try: - docker_client.services.create(service_image, **service_args) - except docker.errors.DockerException: - job.status = 'failed' - else: - job.status = 'queued' - - -def __checkout_job_service(job, session): - service_name = 'job_{}'.format(job.id) - try: - service = 
docker_client.services.get(service_name) - except docker.errors.NotFound: - logging.error('__checkout_job_service({}): '.format(job.id) - + 'The service does not exist. ' - + '(status: {} -> failed)'.format(job.status)) - job.status = 'failed' - return - except docker.errors.DockerException: - return - service_tasks = service.tasks() - if not service_tasks: - return - task_state = service_tasks[0].get('Status').get('State') - if job.status == 'queued' and task_state != 'pending': - job.status = 'running' - elif (job.status == 'running' - and (task_state == 'complete' or task_state == 'failed')): - service.remove() - job.end_date = datetime.utcnow() - job.status = task_state - if task_state == 'complete': - results_dir = os.path.join(config.DATA_DIR, - str(job.user_id), - 'jobs', - str(job.id), - 'output') - results = filter(lambda x: x.endswith('.zip'), - os.listdir(results_dir)) - for result in results: - job_result = JobResult(dir=results_dir, - filename=result, - job_id=job.id) - session.add(job_result) - - -def __remove_job_service(job): - service_name = 'job_{}'.format(job.id) - try: - service = docker_client.services.get(service_name) - except docker.errors.NotFound: - job.status = 'canceled' - except docker.errors.DockerException: - return - else: - service.update(mounts=None) - service.remove() diff --git a/web/app/daemon/libnotify/__init__.py b/web/app/daemon/libnotify/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/web/app/daemon/libnotify/notification.py b/web/app/daemon/libnotify/notification.py deleted file mode 100644 index 488471c3..00000000 --- a/web/app/daemon/libnotify/notification.py +++ /dev/null @@ -1,28 +0,0 @@ -from email.message import EmailMessage - - -class Notification(EmailMessage): - """docstring for Email.""" - - def set_notification_content(self, - subject_template, - subject_template_values_dict, - body_txt_template_path, - body_html_template_path, - body_template_values_dict): - # Create subject with subject_template_values_dict - self['subject'] = subject_template.format( - **subject_template_values_dict) - # Open template files and insert values from body_template_values_dict - with open(body_txt_template_path) as nfile: - self.body = nfile.read().format(**body_template_values_dict) - with open(body_html_template_path) as nfile: - self.html = nfile.read().format(**body_template_values_dict) - # Set txt of email - self.set_content(self.body) - # Set html alternative - self.add_alternative(self.html, subtype='html') - - def set_addresses(self, sender, recipient): - self['From'] = sender - self['to'] = recipient diff --git a/web/app/daemon/libnotify/service.py b/web/app/daemon/libnotify/service.py deleted file mode 100644 index 633fb386..00000000 --- a/web/app/daemon/libnotify/service.py +++ /dev/null @@ -1,16 +0,0 @@ -class NotificationService: - """This is a nopaque notifcation service object.""" - - def __init__(self, smtp): - # Bool to show if the mail server stoped sending mails due to exceeding - # its sending limit - self.mail_limit_exceeded = False - # Holds due to an error unsent email notifications - self.not_sent = {} - self.smtp = smtp - - def send(self, email): - self.smtp.send_message(email) - - def quit(self): - self.smtp.quit() diff --git a/web/app/daemon/libnotify/templates/notification.html b/web/app/daemon/libnotify/templates/notification.html deleted file mode 100644 index e2edfe75..00000000 --- a/web/app/daemon/libnotify/templates/notification.html +++ /dev/null @@ -1,15 +0,0 @@ - - -

-Dear {username},
-
-The status of your Job/Corpus({id}) with the title "{title}" has changed!
-It is now {status}!
-Time of this status update was: {time} UTC
-
-You can access your Job/Corpus here: {url}
-
-Kind regards!
-Your nopaque team

- - diff --git a/web/app/daemon/libnotify/templates/notification.txt b/web/app/daemon/libnotify/templates/notification.txt deleted file mode 100644 index 0e221c54..00000000 --- a/web/app/daemon/libnotify/templates/notification.txt +++ /dev/null @@ -1,10 +0,0 @@ -Dear {username}, - -The status of your Job/Corpus({id}) with the title "{title}" has changed! -It is now {status}! -Time of this status update was: {time} UTC - -You can access your Job/Corpus here: {url} - -Kind regards! -Your nopaque team \ No newline at end of file diff --git a/web/app/daemon/notify.py b/web/app/daemon/notify.py deleted file mode 100644 index 5d3d23f3..00000000 --- a/web/app/daemon/notify.py +++ /dev/null @@ -1,111 +0,0 @@ -from sqlalchemy import asc -from .libnotify.notification import Notification -from .libnotify.service import NotificationService -from .. import configuration as config -from .. import Session -from ..decorators import background -from ..models import NotificationEmailData -import logging -import os -import smtplib - - -ROOT_DIR = os.path.abspath(os.path.dirname(__file__)) - - -@background -def notify(): - session = Session() - if config.SMTP_USE_SSL: - smtp = smtplib.SMTP_SSL(host=config.SMTP_SERVER, port=config.SMTP_PORT) - else: - smtp = smtplib.SMTP(host=config.SMTP_SERVER, port=config.SMTP_PORT) - if config.SMTP_USE_TLS: - smtp.starttls() - try: - smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD) - except smtplib.SMTPHeloError: - logging.warning('The server didn’t reply properly to the HELO ' - 'greeting.') - return - except smtplib.SMTPAuthenticationError as e: - logging.warning('The server didn’t accept the username/password ' - 'combination.') - logging.warning(e) - return - except smtplib.SMTPNotSupportedError: - logging.warning('The AUTH command is not supported by the server.') - return - except smtplib.SMTPException: - logging.warning('No suitable authentication method was found.') - return - notification_service = NotificationService(smtp) - # create notifications (content, recipient etc.) 
- notifications = __create_mail_notifications(notification_service, session) - # only login and send mails if there are any notifications - if (len(notifications) > 0): - # combine new and unsent notifications - notifications.update(notification_service.not_sent) - # send all notifications - __send_mail_notifications(notifications, notification_service) - # remove unsent notifications because they have been sent now - # but only if mail limit has not been exceeded - if (notification_service.mail_limit_exceeded is not True): - notification_service.not_sent = {} - smtp.quit() - Session.remove() - - -# Email notification functions -def __create_mail_notifications(notification_service, session): - notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all() # noqa - notifications = {} - for data in notification_email_data: - notification = Notification() - notification.set_addresses(config.SMTP_DEFAULT_SENDER, - data.job.user.email) - subject_template = ('[nopaque] Status update for your Job/Corpora: ' - '{title}!') - subject_template_values_dict = {'title': data.job.title} - url = '{}://{}/{}/{}'.format(config.PROTOCOL, - config.DOMAIN, - 'jobs', - data.job.id) - body_template_values_dict = {'username': data.job.user.username, - 'id': data.job.id, - 'title': data.job.title, - 'status': data.notify_status, - 'time': data.creation_date, - 'url': url} - txt_tmplt = os.path.join(ROOT_DIR, - 'libnotify/templates/notification.txt') - html_tmplt = os.path.join(ROOT_DIR, - 'libnotify/templates/notification.html') - notification.set_notification_content(subject_template, - subject_template_values_dict, - txt_tmplt, - html_tmplt, - body_template_values_dict) - notifications[data.job.id] = notification - # Using a dictionary for notifications avoids sending multiple mails - # if the status of a job changes in a few seconds. The user will not - # get swamped with mails for queued, running and complete if those - # happen in in a few seconds. Only the last update will be sent. - # This depends on the sleep time interval though. - session.delete(data) - session.commit() - return notifications - - -def __send_mail_notifications(notifications, notification_service): - for key, notification in notifications.items(): - try: - notification_service.send(notification) - notification_service.mail_limit_exceeded = False - except Exception: - # Adds notifications to unsent if mail server exceded limit for - # consecutive mail sending - logging.warning('limit') - notification_service.not_sent[key] = notification - notification_service.mail_limit_exceeded = True - notification_service.not_sent.update(notifications) diff --git a/web/app/tasks/__init__.py b/web/app/tasks/__init__.py new file mode 100644 index 00000000..ba33a1fe --- /dev/null +++ b/web/app/tasks/__init__.py @@ -0,0 +1,35 @@ +from .. import db +from ..models import Corpus, Job +import docker + + +docker_client = docker.from_env() +from . 
import corpus_utils, job_utils # noqa + + +def check_corpora(): + corpora = Corpus.query.all() + for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): + corpus_utils.create_build_corpus_service(corpus) + for corpus in filter(lambda corpus: (corpus.status == 'queued' + or corpus.status == 'running'), + corpora): + corpus_utils.checkout_build_corpus_service(corpus) + for corpus in filter(lambda corpus: corpus.status == 'start analysis', + corpora): + corpus_utils.create_cqpserver_container(corpus) + for corpus in filter(lambda corpus: corpus.status == 'stop analysis', + corpora): + corpus_utils.remove_cqpserver_container(corpus) + db.session.commit() + + +def check_jobs(): + jobs = Job.query.all() + for job in filter(lambda job: job.status == 'submitted', jobs): + job_utils.create_job_service(job) + for job in filter(lambda job: job.status == 'queued', jobs): + job_utils.checkout_job_service(job) + for job in filter(lambda job: job.status == 'running', jobs): + job_utils.checkout_job_service(job) + db.session.commit() diff --git a/web/app/tasks/corpus_utils.py b/web/app/tasks/corpus_utils.py new file mode 100644 index 00000000..c06b19ac --- /dev/null +++ b/web/app/tasks/corpus_utils.py @@ -0,0 +1,120 @@ +from flask import current_app +from . import docker_client +import docker +import logging +import os +import shutil + + +def create_build_corpus_service(corpus): + corpus_dir = os.path.join(current_app.config['DATA_DIR'], + str(corpus.user_id), + 'corpora', + str(corpus.id)) + corpus_data_dir = os.path.join(corpus_dir, 'data') + corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt') + corpus_registry_dir = os.path.join(corpus_dir, 'registry') + if os.path.exists(corpus_data_dir): + shutil.rmtree(corpus_data_dir) + if os.path.exists(corpus_registry_dir): + shutil.rmtree(corpus_registry_dir) + os.mkdir(corpus_data_dir) + os.mkdir(corpus_registry_dir) + service_args = { + 'command': 'docker-entrypoint.sh build-corpus', + 'constraints': ['node.role==worker'], + 'labels': {'origin': 'nopaque', + 'type': 'corpus.prepare', + 'corpus_id': str(corpus.id)}, + 'mounts': [corpus_file + ':/root/files/corpus.vrt:ro', + corpus_data_dir + ':/corpora/data:rw', + corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], + 'name': 'build-corpus_{}'.format(corpus.id), + 'restart_policy': docker.types.RestartPolicy() + } + service_image = \ + 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' + try: + docker_client.services.create(service_image, **service_args) + except docker.errors.APIError as e: + logging.error('create_build_corpus_service({}): '.format(corpus.id) + + '{} (status: {} -> failed)'.format(e, corpus.status)) + corpus.status = 'failed' + else: + corpus.status = 'queued' + finally: + # TODO: send email + pass + + +def checkout_build_corpus_service(corpus): + service_name = 'build-corpus_{}'.format(corpus.id) + try: + service = docker_client.services.get(service_name) + except docker.errors.NotFound as e: + logging.error('checkout_build_corpus_service({}):'.format(corpus.id) + + ' {} (stauts: {} -> failed)'.format(e, corpus.status)) + corpus.status = 'failed' + # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion + else: + service_tasks = service.tasks() + if not service_tasks: + return + task_state = service_tasks[0].get('Status').get('State') + if corpus.status == 'queued' and task_state != 'pending': + corpus.status = 'running' + elif corpus.status == 'running' and task_state == 'complete': + service.remove() + corpus.status = 
'prepared' + elif corpus.status == 'running' and task_state == 'failed': + service.remove() + corpus.status = task_state + finally: + # TODO: send email + pass + + +def create_cqpserver_container(corpus): + corpus_dir = os.path.join(current_app.config['DATA_DIR'], + str(corpus.user_id), + 'corpora', + str(corpus.id)) + corpus_data_dir = os.path.join(corpus_dir, 'data') + corpus_registry_dir = os.path.join(corpus_dir, 'registry') + container_args = { + 'command': 'cqpserver', + 'detach': True, + 'volumes': [corpus_data_dir + ':/corpora/data:rw', + corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], + 'name': 'cqpserver_{}'.format(corpus.id), + 'network': 'nopaque_default' + } + container_image = \ + 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' + try: + container = docker_client.containers.get(container_args['name']) + except docker.errors.NotFound: + pass + except docker.errors.DockerException: + return + else: + container.remove(force=True) + try: + docker_client.containers.run(container_image, **container_args) + except docker.errors.DockerException: + return + else: + corpus.status = 'analysing' + + +def remove_cqpserver_container(corpus): + container_name = 'cqpserver_{}'.format(corpus.id) + try: + container = docker_client.containers.get(container_name) + except docker.errors.NotFound: + pass + except docker.errors.DockerException: + return + else: + container.remove(force=True) + corpus.status = 'prepared' diff --git a/web/app/tasks/job_utils.py b/web/app/tasks/job_utils.py new file mode 100644 index 00000000..2094e7cd --- /dev/null +++ b/web/app/tasks/job_utils.py @@ -0,0 +1,101 @@ +from datetime import datetime +from flask import current_app +from . import docker_client +from .. import db +from ..models import JobResult +import docker +import logging +import json +import os + + +def create_job_service(job): + job_dir = os.path.join(current_app.config['DATA_DIR'], + str(job.user_id), + 'jobs', + str(job.id)) + cmd = '{} -i /files -o /files/output'.format(job.service) + if job.service == 'file-setup': + cmd += ' -f {}'.format(job.secure_filename) + cmd += ' --log-dir /files' + cmd += ' --zip [{}]_{}'.format(job.service, job.secure_filename) + cmd += ' ' + ' '.join(json.loads(job.service_args)) + service_args = {'command': cmd, + 'constraints': ['node.role==worker'], + 'labels': {'origin': 'nopaque', + 'type': 'service.{}'.format(job.service), + 'job_id': str(job.id)}, + 'mounts': [job_dir + ':/files:rw'], + 'name': 'job_{}'.format(job.id), + 'resources': docker.types.Resources( + cpu_reservation=job.n_cores * (10 ** 9), + mem_reservation=job.mem_mb * (10 ** 6)), + 'restart_policy': docker.types.RestartPolicy()} + service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/' + + job.service + ':' + job.service_version) + try: + docker_client.services.create(service_image, **service_args) + except docker.errors.APIError as e: + logging.error('create_job_service({}): {} '.format(job.id, e) + + '(status: {} -> failed)'.format(job.status)) + job.status = 'failed' + else: + job.status = 'queued' + finally: + # TODO: send email + pass + + +def checkout_job_service(job): + service_name = 'job_{}'.format(job.id) + try: + service = docker_client.services.get(service_name) + except docker.errors.NotFound as e: + logging.error('checkout_job_service({}): {} '.format(job.id, e) + + '(status: {} -> submitted)'.format(job.status)) + job.status = 'submitted' + # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion + else: + service_tasks = service.tasks() + if 
not service_tasks: + return + task_state = service_tasks[0].get('Status').get('State') + if job.status == 'queued' and task_state != 'pending': + job.status = 'running' + elif job.status == 'queued' and task_state == 'complete': + service.remove() + job.end_date = datetime.utcnow() + job.status = task_state + if task_state == 'complete': + results_dir = os.path.join(current_app.config['DATA_DIR'], + str(job.user_id), + 'jobs', + str(job.id), + 'output') + results = filter(lambda x: x.endswith('.zip'), + os.listdir(results_dir)) + for result in results: + job_result = JobResult(dir=results_dir, + filename=result, + job_id=job.id) + db.session.add(job_result) + elif job.status == 'running' and task_state == 'failed': + service.remove() + job.end_date = datetime.utcnow() + job.status = task_state + finally: + # TODO: send email + pass + + +def remove_job_service(job): + service_name = 'job_{}'.format(job.id) + try: + service = docker_client.services.get(service_name) + except docker.errors.NotFound: + # TODO: send email + job.status = 'canceled' + # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion + else: + service.update(mounts=None) + service.remove() diff --git a/web/app/templates/tasks/email/notification.html.j2 b/web/app/templates/tasks/email/notification.html.j2 new file mode 100644 index 00000000..79f0e2dd --- /dev/null +++ b/web/app/templates/tasks/email/notification.html.j2 @@ -0,0 +1,9 @@ +
+Dear {{ user.username }},
+
+The status of your Job/Corpus({{ job.id }}) with the title "{{ job.title }}" has changed!
+It is now {{ job.status }}!
+Time of this status update was: {time} UTC
+
+You can access your Job/Corpus here: {{ url_for('jobs.job', job_id=job.id) }}
+
+Kind regards!
+Your nopaque team
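The two notification templates above are consumed by the create_message()/send() helpers that this series adjusts in web/app/email.py, while the actual sending is still marked "TODO: send email" in corpus_utils.py and job_utils.py. Below is only a minimal sketch of how such a notification could be assembled from these pieces; notify_job_status_change(), the subject line, the timestamp format and the use of str.format() for the literal {time} placeholder are illustrative assumptions, not part of these patches.

# Illustrative sketch only, not part of the patch series.
from datetime import datetime
from ..email import create_message, send  # helpers defined in web/app/email.py


def notify_job_status_change(job):
    # Assumed helper: render the tasks/email/notification templates for a job
    # whose status just changed and mail them to the job's owner.
    user = job.creator
    msg = create_message(user.email,
                         'Job "{}" status update'.format(job.title),
                         'tasks/email/notification',
                         job=job, user=user)
    # The templates keep a literal {time} placeholder next to the Jinja2
    # variables, so it is filled in afterwards on the rendered bodies.
    now = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
    msg.body = msg.body.format(time=now)
    msg.html = msg.html.format(time=now)
    send(msg)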
diff --git a/web/app/templates/tasks/email/notification.txt.j2 b/web/app/templates/tasks/email/notification.txt.j2 new file mode 100644 index 00000000..25d797c8 --- /dev/null +++ b/web/app/templates/tasks/email/notification.txt.j2 @@ -0,0 +1,10 @@ +Dear {{ user.username }}, + +The status of your Job/Corpus({{ job.id }}) with the title "{{ job.title }}" has changed! +It is now {{ job.status }}! +Time of this status update was: {time} UTC + +You can access your Job/Corpus here: {{ url_for('jobs.job', job_id=job.id) }} + +Kind regards! +Your nopaque team diff --git a/web/boot.sh b/web/boot.sh index 0d088ac2..f39bb4c8 100755 --- a/web/boot.sh +++ b/web/boot.sh @@ -1,15 +1,15 @@ #!/bin/bash source venv/bin/activate -export FLASK_APP=nopaque.py +while true; do + flask deploy + if [[ "$?" == "0" ]]; then + break + fi + echo Deploy command failed, retrying in 5 secs... + sleep 5 +done + if [[ "$#" -eq 0 ]]; then - while true; do - flask deploy - if [[ "$?" == "0" ]]; then - break - fi - echo Deploy command failed, retrying in 5 secs... - sleep 5 - done python nopaque.py elif [[ "$1" == "flask" ]]; then exec ${@:1} diff --git a/web/nopaque.py b/web/nopaque.py index d636cfd5..43d69c38 100644 --- a/web/nopaque.py +++ b/web/nopaque.py @@ -51,6 +51,13 @@ def deploy(): Role.insert_roles() +@app.cli.command() +def tasks(): + from app.tasks import process_corpora, process_jobs + process_corpora() + process_jobs() + + @app.cli.command() def test(): """Run the unit tests.""" From 5a06a6b241e19e8b3fa33d154740f0da9a631d1b Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Fri, 13 Nov 2020 10:01:51 +0100 Subject: [PATCH 07/52] More exception handling. Remove unused database models. New common view structure! --- .env.tpl | 135 +++--- docker-compose.yml | 1 + web/Dockerfile | 5 +- web/app/__init__.py | 2 +- web/app/admin/__init__.py | 2 +- web/app/admin/forms.py | 1 - web/app/admin/views.py | 14 +- web/app/auth/__init__.py | 2 +- web/app/auth/forms.py | 2 +- web/app/auth/views.py | 107 +++-- web/app/corpora/views.py | 390 ++++++++---------- web/app/email.py | 4 +- web/app/jobs/__init__.py | 2 +- web/app/jobs/views.py | 30 +- web/app/main/__init__.py | 2 +- web/app/main/views.py | 25 +- web/app/models.py | 181 +++----- web/app/query_results/views.py | 150 ------- web/app/services/__init__.py | 2 +- web/app/services/views.py | 61 ++- web/app/settings/forms.py | 2 +- web/app/settings/views.py | 19 +- web/app/tasks/__init__.py | 14 +- web/app/tasks/corpus_utils.py | 131 +++--- web/app/tasks/job_utils.py | 90 ++-- web/app/templates/auth/login.html.j2 | 10 +- web/app/templates/auth/register.html.j2 | 12 +- web/app/templates/auth/reset_password.html.j2 | 8 +- .../auth/reset_password_request.html.j2 | 6 +- web/app/templates/corpora/add_corpus.html.j2 | 8 +- .../templates/corpora/add_corpus_file.html.j2 | 14 +- .../templates/corpora/analyse_corpus.html.j2 | 2 +- web/app/templates/corpora/corpus_file.html.j2 | 12 +- .../templates/corpora/import_corpus.html.j2 | 12 +- .../query_results/add_query_result.html.j2 | 10 +- web/app/templates/main/index.html.j2 | 10 +- web/app/templates/nopaque.html.j2 | 6 +- web/app/templates/services/file-setup.html.j2 | 12 +- web/app/templates/services/nlp.html.j2 | 18 +- web/app/templates/services/ocr.html.j2 | 18 +- .../tasks/email/notification.html.j2 | 7 +- .../templates/tasks/email/notification.txt.j2 | 7 +- web/boot.sh | 1 + web/config.py | 139 +++---- web/nopaque.py | 11 +- 45 files changed, 692 insertions(+), 1005 deletions(-) delete mode 100644 web/app/query_results/views.py diff 
--git a/.env.tpl b/.env.tpl index 2c13eb3e..e7c2cea7 100644 --- a/.env.tpl +++ b/.env.tpl @@ -9,128 +9,116 @@ # NOTE: Use `.` as # HOST_MQ_DIR= -# Example: 999 -# HINT: Use this bash command `getent group docker | cut -d: -f3` -HOST_DOCKER_GID= +# Example: 1000 +# HINT: Use this bash command `id -u` +HOST_UID= # Example: 1000 # HINT: Use this bash command `id -g` HOST_GID= -# DEFAULT: ./nopaqued.log -# NOTES: Use `.` as , -# This file must be present on container startup -# HOST_NOPAQUE_DAEMON_LOG_FILE= +# Example: 999 +# HINT: Use this bash command `getent group docker | cut -d: -f3` +HOST_DOCKER_GID= # DEFAULT: ./nopaque.log # NOTES: Use `.` as , # This file must be present on container startup -# HOST_NOPAQUE_LOG_FILE= - -# Example: 1000 -# HINT: Use this bash command `id -u` -HOST_UID= +# HOST_LOG_FILE= ################################################################################ -# Cookies # +# Flask # +# https://flask.palletsprojects.com/en/1.1.x/config/ # ################################################################################ -# CHOOSE ONE: False, True -# DEFAULT: False -# HINT: Set to true if you redirect http to https -# NOPAQUE_REMEMBER_COOKIE_SECURE= +# DEFAULT: hard to guess string +# HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"` +# SECRET_KEY= # CHOOSE ONE: False, True # DEFAULT: False # HINT: Set to true if you redirect http to https -# NOPAQUE_SESSION_COOKIE_SECURE= +# SESSION_COOKIE_SECURE= ################################################################################ -# Database # -# DATABASE_URI blueprint: # -# - dialect[+driver]://username:password@host[:port]/database # -# - sqlite is not supported # -# - values in square brackets are optional # +# Flask-Login # +# https://flask-login.readthedocs.io/en/latest/ # ################################################################################ -# DEFAULT: postgresql://nopaque:nopaque@db/nopaque -# NOPAQUE_DATABASE_URL= - -# DEFAULT: postgresql://nopaque:nopaque@db/nopaque_dev -# NOPAQUE_DEV_DATABASE_URL= - -# DEFAULT: postgresql://nopaque:nopaque@db/nopaque_test -# NOPAQUE_TEST_DATABASE_URL= +# CHOOSE ONE: False, True +# DEFAULT: False +# HINT: Set to true if you redirect http to https +# REMEMBER_COOKIE_SECURE= ################################################################################ -# Email # +# Flask-Mail # +# https://pythonhosted.org/Flask-Mail/ # ################################################################################ # EXAMPLE: nopaque Admin -NOPAQUE_SMTP_DEFAULT_SENDER= +MAIL_DEFAULT_SENDER= -NOPAQUE_SMTP_PASSWORD= +MAIL_PASSWORD= # EXAMPLE: smtp.example.com -NOPAQUE_SMTP_SERVER= +MAIL_SERVER= # EXAMPLE: 587 -NOPAQUE_SMTP_PORT= +MAIL_PORT= # CHOOSE ONE: False, True # DEFAULT: False -# NOPAQUE_SMTP_USE_SSL= +# MAIL_USE_SSL= # CHOOSE ONE: False, True # DEFAULT: False -# NOPAQUE_SMTP_USE_TLS= +# MAIL_USE_TLS= # EXAMPLE: nopaque@example.com -NOPAQUE_SMTP_USERNAME= +MAIL_USERNAME= ################################################################################ -# General # +# Flask-SQLAlchemy # +# https://flask-sqlalchemy.palletsprojects.com/en/2.x/config/ # ################################################################################ +# DEFAULT with development config: postgresql://nopaque:nopaque@db/nopaque_dev +# DEFAULT with production config: postgresql://nopaque:nopaque@db/nopaque +# DEFAULT with testing config: postgresql://nopaque:nopaque@db/nopaque_test +# SQLALCHEMY_DATABASE_URI= + + 
+################################################################################ +# nopaque # +################################################################################ +# If an account is registered with this email adress gets automatically +# assigned the administrator role. # EXAMPLE: admin.nopaque@example.com -NOPAQUE_ADMIN_EMAIL_ADRESS= +NOPAQUE_ADMIN= # DEFAULT: development # CHOOSE ONE: development, production, testing # NOPAQUE_CONFIG= +# This email adress is used for the contact button in the nopaque footer. If +# not set, no contact button is displayed. # DEFAULT: None # EXAMPLE: contact.nopaque@example.com -# NOPAQUE_CONTACT_EMAIL_ADRESS= +# NOPAQUE_CONTACT= # DEFAULT: /mnt/nopaque -# NOTE: This must be a network share and it must be available on all Docker Swarm nodes +# NOTE: This must be a network share and it must be available on all Docker +# Swarm nodes # NOPAQUE_DATA_DIR= -# DEFAULT: localhost -# NOPAQUE_DOMAIN= - # DEFAULT: 0.0.0.0 # NOPAQUE_HOST= # DEFAULT: 5000 # NOPAQUE_PORT= -# CHOOSE ONE: http, https -# DEFAULT: http -# NOPAQUE_PROTOCOL= - -# DEFAULT: hard to guess string -# HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"` -# NOPAQUE_SECRET_KEY= - - -################################################################################ -# Logging # -################################################################################ -# DEFAULT: /home/nopaqued/nopaqued.log ~ /home/nopaqued/nopaqued.log -# NOTE: Use `.` as -# NOPAQUE_DAEMON_LOG_FILE= +# transport://[userid:password]@hostname[:port]/[virtual_host] +NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI= # DEFAULT: %Y-%m-%d %H:%M:%S # NOPAQUE_LOG_DATE_FORMAT= @@ -146,37 +134,22 @@ NOPAQUE_ADMIN_EMAIL_ADRESS= # CHOOSE ONE: CRITICAL, ERROR, WARNING, INFO, DEBUG # NOPAQUE_LOG_LEVEL= - -################################################################################ -# Message queue # -# MESSAGE_QUEUE_URI blueprint: # -# - transport://[userid:password]@hostname[:port]/[virtual_host] # -# - values in square brackets are optional # -################################################################################ -# DEFAULT: None -# HINT: A message queue is not required when using a single server process -# NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI= - - -################################################################################ -# Proxy fix # -################################################################################ # DEFAULT: 0 # Number of values to trust for X-Forwarded-For -# NOPAQUE_NUM_PROXIES_X_FOR= +# NOPAQUE_PROXY_FIX_X_FOR= # DEFAULT: 0 # Number of values to trust for X-Forwarded-Host -# NOPAQUE_NUM_PROXIES_X_HOST= +# NOPAQUE_PROXY_FIX_X_HOST= # DEFAULT: 0 # Number of values to trust for X-Forwarded-Port -# NOPAQUE_NUM_PROXIES_X_PORT= +# NOPAQUE_PROXY_FIX_X_PORT= # DEFAULT: 0 # Number of values to trust for X-Forwarded-Prefix -# NOPAQUE_NUM_PROXIES_X_PREFIX= +# NOPAQUE_PROXY_FIX_X_PREFIX= # DEFAULT: 0 # Number of values to trust for X-Forwarded-Proto -# NOPAQUE_NUM_PROXIES_X_PROTO= +# NOPAQUE_PROXY_FIX_X_PROTO= diff --git a/docker-compose.yml b/docker-compose.yml index 57f8b5bd..c1d75151 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,5 +28,6 @@ services: image: nopaque:development restart: unless-stopped volumes: + - "/var/run/docker.sock:/var/run/docker.sock" - "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}" - "${HOST_NOPAQUE_LOG_FILE-./nopaque.log}:${NOPAQUE_LOG_FILE:-/home/nopaque/nopaque.log}" diff --git a/web/Dockerfile b/web/Dockerfile index 
4d8037e1..3681b701 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -21,8 +21,9 @@ RUN apt-get update \ && rm -r /var/lib/apt/lists/* -RUN groupadd --gid ${GID} --system nopaque \ - && useradd --create-home --gid ${GID} --no-log-init --system --uid ${UID} nopaque +RUN groupadd --gid ${DOCKER_GID} --system docker \ + && groupadd --gid ${GID} --system nopaque \ + && useradd --create-home --gid ${GID} --groups ${DOCKER_GID} --no-log-init --system --uid ${UID} nopaque USER nopaque WORKDIR /home/nopaque diff --git a/web/app/__init__.py b/web/app/__init__.py index 75108bd3..9a399ddc 100644 --- a/web/app/__init__.py +++ b/web/app/__init__.py @@ -26,7 +26,7 @@ def create_app(config_name): mail.init_app(app) paranoid.init_app(app) socketio.init_app( - app, message_queue=config[config_name].SOCKETIO_MESSAGE_QUEUE_URI) + app, message_queue=app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI']) with app.app_context(): from . import events diff --git a/web/app/admin/__init__.py b/web/app/admin/__init__.py index 40fd29a4..9bb011f8 100644 --- a/web/app/admin/__init__.py +++ b/web/app/admin/__init__.py @@ -2,4 +2,4 @@ from flask import Blueprint admin = Blueprint('admin', __name__) -from . import views # noqa +from . import views diff --git a/web/app/admin/forms.py b/web/app/admin/forms.py index 42706bab..a0f796d6 100644 --- a/web/app/admin/forms.py +++ b/web/app/admin/forms.py @@ -12,4 +12,3 @@ class EditGeneralSettingsAdminForm(EditGeneralSettingsForm): super().__init__(*args, user=user, **kwargs) self.role.choices = [(role.id, role.name) for role in Role.query.order_by(Role.name).all()] - self.user = user diff --git a/web/app/admin/views.py b/web/app/admin/views.py index c3f4e875..798c0df8 100644 --- a/web/app/admin/views.py +++ b/web/app/admin/views.py @@ -29,12 +29,11 @@ def user(user_id): @admin_required def delete_user(user_id): settings_tasks.delete_user(user_id) - flash('User has been deleted!') + flash('User has been marked for deletion!') return redirect(url_for('.users')) -@admin.route('/users//edit_general_settings', - methods=['GET', 'POST']) +@admin.route('/users//edit_general_settings', methods=['GET', 'POST']) # noqa @login_required @admin_required def edit_general_settings(user_id): @@ -46,16 +45,13 @@ def edit_general_settings(user_id): user.username = form.username.data user.confirmed = form.confirmed.data user.role = Role.query.get(form.role.data) - db.session.add(user) db.session.commit() - flash('The profile has been updated.') - return redirect(url_for('admin.edit_general_settings', user_id=user.id)) + flash('Settings have been updated.') + return redirect(url_for('.edit_general_settings', user_id=user.id)) form.confirmed.data = user.confirmed form.dark_mode.data = user.setting_dark_mode form.email.data = user.email form.role.data = user.role_id form.username.data = user.username return render_template('admin/edit_general_settings.html.j2', - form=form, - title='General settings', - user=user) + form=form, title='General settings', user=user) diff --git a/web/app/auth/__init__.py b/web/app/auth/__init__.py index 893d7071..a45dc3f3 100644 --- a/web/app/auth/__init__.py +++ b/web/app/auth/__init__.py @@ -2,4 +2,4 @@ from flask import Blueprint auth = Blueprint('auth', __name__) -from . import views # noqa +from . 
import views diff --git a/web/app/auth/forms.py b/web/app/auth/forms.py index 3344096b..98dab654 100644 --- a/web/app/auth/forms.py +++ b/web/app/auth/forms.py @@ -18,7 +18,7 @@ class RegistrationForm(FlaskForm): username = StringField( 'Username', validators=[DataRequired(), Length(1, 64), - Regexp(current_app.config['ALLOWED_USERNAME_REGEX'], + Regexp(current_app.config['NOPAQUE_USERNAME_REGEX'], message='Usernames must have only letters, numbers,' ' dots or underscores')] ) diff --git a/web/app/auth/views.py b/web/app/auth/views.py index c1fe37ed..b6355642 100644 --- a/web/app/auth/views.py +++ b/web/app/auth/views.py @@ -1,5 +1,5 @@ -from flask import (current_app, flash, redirect, render_template, request, - url_for) +from datetime import datetime +from flask import abort, flash, redirect, render_template, request, url_for from flask_login import current_user, login_user, login_required, logout_user from . import auth from .forms import (LoginForm, ResetPasswordForm, ResetPasswordRequestForm, @@ -7,8 +7,8 @@ from .forms import (LoginForm, ResetPasswordForm, ResetPasswordRequestForm, from .. import db from ..email import create_message, send from ..models import User +import logging import os -import shutil @auth.before_app_request @@ -18,11 +18,12 @@ def before_request(): unconfirmed view if user is unconfirmed. """ if current_user.is_authenticated: - current_user.ping() - if not current_user.confirmed \ - and request.endpoint \ - and request.blueprint != 'auth' \ - and request.endpoint != 'static': + current_user.last_seen = datetime.utcnow() + db.session.commit() + if (not current_user.confirmed + and request.endpoint + and request.blueprint != 'auth' + and request.endpoint != 'static'): return redirect(url_for('auth.unconfirmed')) @@ -30,20 +31,19 @@ def before_request(): def login(): if current_user.is_authenticated: return redirect(url_for('main.dashboard')) - login_form = LoginForm(prefix='login-form') - if login_form.validate_on_submit(): - user = User.query.filter_by(username=login_form.user.data).first() + form = LoginForm(prefix='login-form') + if form.validate_on_submit(): + user = User.query.filter_by(username=form.user.data).first() if user is None: - user = User.query.filter_by(email=login_form.user.data).first() - if user is not None and user.verify_password(login_form.password.data): - login_user(user, login_form.remember_me.data) + user = User.query.filter_by(email=form.user.data.lower()).first() + if user is not None and user.verify_password(form.password.data): + login_user(user, form.remember_me.data) next = request.args.get('next') if next is None or not next.startswith('/'): next = url_for('main.dashboard') return redirect(next) flash('Invalid email/username or password.') - return render_template('auth/login.html.j2', login_form=login_form, - title='Log in') + return render_template('auth/login.html.j2', form=form, title='Log in') @auth.route('/logout') @@ -58,26 +58,28 @@ def logout(): def register(): if current_user.is_authenticated: return redirect(url_for('main.dashboard')) - registration_form = RegistrationForm(prefix='registration-form') - if registration_form.validate_on_submit(): - user = User(email=registration_form.email.data.lower(), - password=registration_form.password.data, - username=registration_form.username.data) + form = RegistrationForm(prefix='registration-form') + if form.validate_on_submit(): + user = User(email=form.email.data.lower(), + password=form.password.data, + username=form.username.data) db.session.add(user) db.session.commit() - 
user_dir = os.path.join(current_app.config['DATA_DIR'], - str(user.id)) - if os.path.exists(user_dir): - shutil.rmtree(user_dir) - os.mkdir(user_dir) - token = user.generate_confirmation_token() - msg = create_message(user.email, 'Confirm Your Account', - 'auth/email/confirm', token=token, user=user) - send(msg) - flash('A confirmation email has been sent to you by email.') - return redirect(url_for('auth.login')) - return render_template('auth/register.html.j2', - registration_form=registration_form, + try: + os.makedirs(user.path) + except OSError: + logging.error('Make dir {} led to an OSError!'.format(user.path)) + db.session.delete(user) + db.session.commit() + abort(500) + else: + token = user.generate_confirmation_token() + msg = create_message(user.email, 'Confirm Your Account', + 'auth/email/confirm', token=token, user=user) + send(msg) + flash('A confirmation email has been sent to you by email.') + return redirect(url_for('.login')) + return render_template('auth/register.html.j2', form=form, title='Register') @@ -92,7 +94,7 @@ def confirm(token): return redirect(url_for('main.dashboard')) else: flash('The confirmation link is invalid or has expired.') - return redirect(url_for('auth.unconfirmed')) + return redirect(url_for('.unconfirmed')) @auth.route('/unconfirmed') @@ -119,39 +121,32 @@ def resend_confirmation(): def reset_password_request(): if current_user.is_authenticated: return redirect(url_for('main.dashboard')) - reset_password_request_form = ResetPasswordRequestForm( - prefix='reset-password-request-form') - if reset_password_request_form.validate_on_submit(): - submitted_email = reset_password_request_form.email.data - user = User.query.filter_by(email=submitted_email.lower()).first() - if user: + form = ResetPasswordRequestForm(prefix='reset-password-request-form') + if form.validate_on_submit(): + user = User.query.filter_by(email=form.email.data.lower()).first() + if user is not None: token = user.generate_reset_token() msg = create_message(user.email, 'Reset Your Password', 'auth/email/reset_password', token=token, user=user) send(msg) - flash('An email with instructions to reset your password has been ' - 'sent to you.') - return redirect(url_for('auth.login')) - return render_template( - 'auth/reset_password_request.html.j2', - reset_password_request_form=reset_password_request_form, - title='Password Reset') + flash('An email with instructions to reset your password has been sent to you.') # noqa + return redirect(url_for('.login')) + return render_template('auth/reset_password_request.html.j2', form=form, + title='Password Reset') @auth.route('/reset/', methods=['GET', 'POST']) def reset_password(token): if current_user.is_authenticated: return redirect(url_for('main.dashboard')) - reset_password_form = ResetPasswordForm(prefix='reset-password-form') - if reset_password_form.validate_on_submit(): - if User.reset_password(token, reset_password_form.password.data): + form = ResetPasswordForm(prefix='reset-password-form') + if form.validate_on_submit(): + if User.reset_password(token, form.password.data): db.session.commit() flash('Your password has been updated.') - return redirect(url_for('auth.login')) + return redirect(url_for('.login')) else: return redirect(url_for('main.index')) - return render_template('auth/reset_password.html.j2', - reset_password_form=reset_password_form, - title='Password Reset', - token=token) + return render_template('auth/reset_password.html.j2', form=form, + title='Password Reset', token=token) diff --git 
a/web/app/corpora/views.py b/web/app/corpora/views.py index a6bef316..13874243 100644 --- a/web/app/corpora/views.py +++ b/web/app/corpora/views.py @@ -1,4 +1,4 @@ -from flask import (abort, current_app, flash, make_response, redirect, request, +from flask import (abort, flash, make_response, redirect, request, render_template, url_for, send_from_directory) from flask_login import current_user, login_required from . import corpora @@ -11,6 +11,7 @@ from jsonschema import validate from .. import db from ..models import Corpus, CorpusFile, QueryResult import json +import logging import os import shutil import glob @@ -22,106 +23,92 @@ from .import_corpus import check_zip_contents @corpora.route('/add', methods=['GET', 'POST']) @login_required def add_corpus(): - add_corpus_form = AddCorpusForm() - if add_corpus_form.validate_on_submit(): + form = AddCorpusForm() + if form.validate_on_submit(): corpus = Corpus(creator=current_user, - description=add_corpus_form.description.data, - status='unprepared', title=add_corpus_form.title.data) + description=form.description.data, + title=form.title.data) db.session.add(corpus) db.session.commit() - dir = os.path.join(current_app.config['DATA_DIR'], - str(corpus.user_id), 'corpora', str(corpus.id)) try: - os.makedirs(dir) + os.makedirs(corpus.path) except OSError: - flash('[ERROR]: Could not add corpus!', 'corpus') - corpus.delete() - else: - url = url_for('corpora.corpus', corpus_id=corpus.id) - flash('[{}] added'.format(url, corpus.title), - 'corpus') - return redirect(url_for('corpora.corpus', corpus_id=corpus.id)) - return render_template('corpora/add_corpus.html.j2', - add_corpus_form=add_corpus_form, + logging.error('Make dir {} led to an OSError!'.format(corpus.path)) + db.session.delete(corpus) + db.session.commit() + abort(500) + flash('Corpus "{}" added!'.format(corpus.title), 'corpus') + return redirect(url_for('.corpus', corpus_id=corpus.id)) + return render_template('corpora/add_corpus.html.j2', form=form, title='Add corpus') @corpora.route('/import', methods=['GET', 'POST']) @login_required def import_corpus(): - import_corpus_form = ImportCorpusForm() - if import_corpus_form.is_submitted(): - if not import_corpus_form.validate(): - return make_response(import_corpus_form.errors, 400) + form = ImportCorpusForm() + if form.is_submitted(): + if not form.validate(): + return make_response(form.errors, 400) corpus = Corpus(creator=current_user, - description=import_corpus_form.description.data, - status='unprepared', - title=import_corpus_form.title.data) + description=form.description.data, + title=form.title.data) db.session.add(corpus) db.session.commit() - dir = os.path.join(current_app.config['DATA_DIR'], - str(corpus.user_id), 'corpora', str(corpus.id)) try: - os.makedirs(dir) + os.makedirs(corpus.path) except OSError: - flash('[ERROR]: Could not import corpus!', 'corpus') - corpus.delete() + logging.error('Make dir {} led to an OSError!'.format(corpus.path)) + db.session.delete(corpus) + db.session.commit() + flash('Internal Server Error', 'error') + return make_response( + {'redirect_url': url_for('.import_corpus')}, 500) + # Upload zip + archive_file = os.path.join(corpus.path, form.file.data.filename) + form.file.data.save(archive_file) + # Some checks to verify it is a valid exported corpus + with ZipFile(archive_file, 'r') as zip: + contents = zip.namelist() + if set(check_zip_contents).issubset(contents): + # Unzip + shutil.unpack_archive(archive_file, corpus.path) + # Register vrt files to corpus + vrts = glob.glob(corpus.path + 
'/*.vrt') + for file in vrts: + element_tree = ET.parse(file) + text_node = element_tree.find('text') + corpus_file = CorpusFile( + address=text_node.get('address', 'NULL'), + author=text_node.get('author', 'NULL'), + booktitle=text_node.get('booktitle', 'NULL'), + chapter=text_node.get('chapter', 'NULL'), + corpus=corpus, + editor=text_node.get('editor', 'NULL'), + filename=os.path.basename(file), + institution=text_node.get('institution', 'NULL'), + journal=text_node.get('journal', 'NULL'), + pages=text_node.get('pages', 'NULL'), + publisher=text_node.get('publisher', 'NULL'), + publishing_year=text_node.get('publishing_year', ''), + school=text_node.get('school', 'NULL'), + title=text_node.get('title', 'NULL') + ) + db.session.add(corpus_file) + # finish import and redirect to imported corpus + corpus.status = 'prepared' + db.session.commit() + os.remove(archive_file) + flash('Corpus "{}" imported!'.format(corpus.title), 'corpus') + return make_response( + {'redirect_url': url_for('.corpus', corpus_id=corpus.id)}, 201) else: - # Upload zip - archive_file = os.path.join(current_app.config['DATA_DIR'], dir, - import_corpus_form.file.data.filename) - corpus_dir = os.path.dirname(archive_file) - import_corpus_form.file.data.save(archive_file) - # Some checks to verify it is a valid exported corpus - with ZipFile(archive_file, 'r') as zip: - contents = zip.namelist() - if set(check_zip_contents).issubset(contents): - # Unzip - shutil.unpack_archive(archive_file, corpus_dir) - # Register vrt files to corpus - vrts = glob.glob(corpus_dir + '/*.vrt') - for file in vrts: - element_tree = ET.parse(file) - text_node = element_tree.find('text') - corpus_file = CorpusFile( - address=text_node.get('address', 'NULL'), - author=text_node.get('author', 'NULL'), - booktitle=text_node.get('booktitle', 'NULL'), - chapter=text_node.get('chapter', 'NULL'), - corpus=corpus, - dir=dir, - editor=text_node.get('editor', 'NULL'), - filename=os.path.basename(file), - institution=text_node.get('institution', 'NULL'), - journal=text_node.get('journal', 'NULL'), - pages=text_node.get('pages', 'NULL'), - publisher=text_node.get('publisher', 'NULL'), - publishing_year=text_node.get('publishing_year', ''), - school=text_node.get('school', 'NULL'), - title=text_node.get('title', 'NULL')) - db.session.add(corpus_file) - # finish import and got to imported corpus - url = url_for('corpora.corpus', corpus_id=corpus.id) - corpus.status = 'prepared' - db.session.commit() - os.remove(archive_file) - flash('[{}] imported'.format(url, - corpus.title), - 'corpus') - return make_response( - {'redirect_url': url_for('corpora.corpus', - corpus_id=corpus.id)}, - 201) - else: - # If imported zip is not valid delete corpus and give feedback - corpus.delete() - db.session.commit() - flash('Imported corpus is not valid.', 'error') - return make_response( - {'redirect_url': url_for('corpora.import_corpus')}, - 201) - return render_template('corpora/import_corpus.html.j2', - import_corpus_form=import_corpus_form, + # If imported zip is not valid delete corpus and give feedback + flash('Can not import corpus "{}" not imported: Invalid archive file!', 'error') # noqa + tasks.delete_corpus(corpus.id) + return make_response( + {'redirect_url': url_for('.import_corpus')}, 201) + return render_template('corpora/import_corpus.html.j2', form=form, title='Import Corpus') @@ -131,17 +118,9 @@ def corpus(corpus_id): corpus = Corpus.query.get_or_404(corpus_id) if not (corpus.creator == current_user or current_user.is_administrator()): abort(403) - 
corpus_files = [dict(filename=corpus_file.filename, - author=corpus_file.author, - title=corpus_file.title, - publishing_year=corpus_file.publishing_year, - corpus_id=corpus.id, - id=corpus_file.id) - for corpus_file in corpus.files] - return render_template('corpora/corpus.html.j2', - corpus=corpus, - corpus_files=corpus_files, - title='Corpus') + corpus_files = [corpus_file.to_dict() for corpus_file in corpus.files] + return render_template('corpora/corpus.html.j2', corpus=corpus, + corpus_files=corpus_files, title='Corpus') @corpora.route('//export') @@ -150,12 +129,11 @@ def export_corpus(corpus_id): corpus = Corpus.query.get_or_404(corpus_id) if not (corpus.creator == current_user or current_user.is_administrator()): abort(403) + # TODO: Check what happens here dir = os.path.dirname(corpus.archive_file) filename = os.path.basename(corpus.archive_file) - return send_from_directory(directory=dir, - filename=filename, - mimetype='zip', - as_attachment=True) + return send_from_directory(as_attachment=True, directory=dir, + filename=filename, mimetype='zip') @corpora.route('//analyse') @@ -168,7 +146,8 @@ def analyse_corpus(corpus_id): display_options_form = DisplayOptionsForm( prefix='display-options-form', result_context=request.args.get('context', 20), - results_per_page=request.args.get('results_per_page', 30)) + results_per_page=request.args.get('results_per_page', 30) + ) query_form = QueryForm(prefix='query-form', query=request.args.get('query')) query_download_form = QueryDownloadForm(prefix='query-download-form') @@ -177,12 +156,12 @@ def analyse_corpus(corpus_id): return render_template( 'corpora/analyse_corpus.html.j2', corpus=corpus, - corpus_id=corpus_id, display_options_form=display_options_form, + inspect_display_options_form=inspect_display_options_form, query_form=query_form, query_download_form=query_download_form, - inspect_display_options_form=inspect_display_options_form, - title='Corpus analysis') + title='Corpus analysis' + ) @corpora.route('//delete') @@ -191,8 +170,8 @@ def delete_corpus(corpus_id): corpus = Corpus.query.get_or_404(corpus_id) if not (corpus.creator == current_user or current_user.is_administrator()): abort(403) + flash('Corpus "{}" marked for deletion!'.format(corpus.title), 'corpus') tasks.delete_corpus(corpus_id) - flash('Corpus deleted!', 'corpus') return redirect(url_for('main.dashboard')) @@ -202,43 +181,33 @@ def add_corpus_file(corpus_id): corpus = Corpus.query.get_or_404(corpus_id) if not (corpus.creator == current_user or current_user.is_administrator()): abort(403) - add_corpus_file_form = AddCorpusFileForm(corpus, - prefix='add-corpus-file-form') - if add_corpus_file_form.is_submitted(): - if not add_corpus_file_form.validate(): - return make_response(add_corpus_file_form.errors, 400) + form = AddCorpusFileForm(corpus, prefix='add-corpus-file-form') + if form.is_submitted(): + if not form.validate(): + return make_response(form.errors, 400) # Save the file - dir = os.path.join(str(corpus.user_id), 'corpora', str(corpus.id)) - add_corpus_file_form.file.data.save( - os.path.join(current_app.config['DATA_DIR'], dir, - add_corpus_file_form.file.data.filename)) - corpus_file = CorpusFile( - address=add_corpus_file_form.address.data, - author=add_corpus_file_form.author.data, - booktitle=add_corpus_file_form.booktitle.data, - chapter=add_corpus_file_form.chapter.data, - corpus=corpus, - dir=dir, - editor=add_corpus_file_form.editor.data, - filename=add_corpus_file_form.file.data.filename, - institution=add_corpus_file_form.institution.data, - 
journal=add_corpus_file_form.journal.data, - pages=add_corpus_file_form.pages.data, - publisher=add_corpus_file_form.publisher.data, - publishing_year=add_corpus_file_form.publishing_year.data, - school=add_corpus_file_form.school.data, - title=add_corpus_file_form.title.data) + form.file.data.save(os.path.join(corpus.path, form.file.data.filename)) + corpus_file = CorpusFile(address=form.address.data, + author=form.author.data, + booktitle=form.booktitle.data, + chapter=form.chapter.data, + corpus=corpus, + editor=form.editor.data, + filename=form.file.data.filename, + institution=form.institution.data, + journal=form.journal.data, + pages=form.pages.data, + publisher=form.publisher.data, + publishing_year=form.publishing_year.data, + school=form.school.data, + title=form.title.data) db.session.add(corpus_file) corpus.status = 'unprepared' db.session.commit() - flash('Corpus file added!', 'corpus') - return make_response( - {'redirect_url': url_for('corpora.corpus', corpus_id=corpus.id)}, - 201) - return render_template('corpora/add_corpus_file.html.j2', - corpus=corpus, - add_corpus_file_form=add_corpus_file_form, - title='Add corpus file') + flash('Corpus file "{}" added!'.format(corpus_file.filename), 'corpus') + return make_response({'redirect_url': url_for('.corpus', corpus_id=corpus.id)}, 201) # noqa + return render_template('corpora/add_corpus_file.html.j2', corpus=corpus, + form=form, title='Add corpus file') @corpora.route('//files//delete') @@ -250,9 +219,9 @@ def delete_corpus_file(corpus_id, corpus_file_id): if not (corpus_file.corpus.creator == current_user or current_user.is_administrator()): abort(403) + flash('Corpus file "{}" marked for deletion!'.format(corpus_file.filename), 'corpus') # noqa tasks.delete_corpus_file(corpus_file_id) - flash('Corpus file deleted!', 'corpus') - return redirect(url_for('corpora.corpus', corpus_id=corpus_id)) + return redirect(url_for('.corpus', corpus_id=corpus_id)) @corpora.route('//files//download') @@ -264,9 +233,8 @@ def download_corpus_file(corpus_id, corpus_file_id): if not (corpus_file.corpus.creator == current_user or current_user.is_administrator()): abort(403) - dir = os.path.join(current_app.config['DATA_DIR'], - corpus_file.dir) - return send_from_directory(as_attachment=True, directory=dir, + return send_from_directory(as_attachment=True, + directory=corpus_file.corpus.path, filename=corpus_file.filename) @@ -274,48 +242,45 @@ def download_corpus_file(corpus_id, corpus_file_id): methods=['GET', 'POST']) @login_required def corpus_file(corpus_id, corpus_file_id): - corpus = Corpus.query.get_or_404(corpus_id) corpus_file = CorpusFile.query.get_or_404(corpus_file_id) - if not corpus_file.corpus_id == corpus_id: + if corpus_file.corpus_id != corpus_id: abort(404) if not (corpus_file.corpus.creator == current_user or current_user.is_administrator()): abort(403) - edit_corpus_file_form = EditCorpusFileForm(prefix='edit-corpus-file-form') - if edit_corpus_file_form.validate_on_submit(): - corpus_file.address = edit_corpus_file_form.address.data - corpus_file.author = edit_corpus_file_form.author.data - corpus_file.booktitle = edit_corpus_file_form.booktitle.data - corpus_file.chapter = edit_corpus_file_form.chapter.data - corpus_file.editor = edit_corpus_file_form.editor.data - corpus_file.institution = edit_corpus_file_form.institution.data - corpus_file.journal = edit_corpus_file_form.journal.data - corpus_file.pages = edit_corpus_file_form.pages.data - corpus_file.publisher = edit_corpus_file_form.publisher.data - 
corpus_file.publishing_year = \ - edit_corpus_file_form.publishing_year.data - corpus_file.school = edit_corpus_file_form.school.data - corpus_file.title = edit_corpus_file_form.title.data + form = EditCorpusFileForm(prefix='edit-corpus-file-form') + if form.validate_on_submit(): + corpus_file.address = form.address.data + corpus_file.author = form.author.data + corpus_file.booktitle = form.booktitle.data + corpus_file.chapter = form.chapter.data + corpus_file.editor = form.editor.data + corpus_file.institution = form.institution.data + corpus_file.journal = form.journal.data + corpus_file.pages = form.pages.data + corpus_file.publisher = form.publisher.data + corpus_file.publishing_year = form.publishing_year.data + corpus_file.school = form.school.data + corpus_file.title = form.title.data corpus.status = 'unprepared' db.session.commit() - flash('Corpus file edited!', 'corpus') - return redirect(url_for('corpora.corpus', corpus_id=corpus_id)) + flash('Corpus file "{}" edited!'.format(corpus_file.filename), 'corpus') # noqa + return redirect(url_for('.corpus', corpus_id=corpus_id)) # If no form is submitted or valid, fill out fields with current values - edit_corpus_file_form.address.data = corpus_file.address - edit_corpus_file_form.author.data = corpus_file.author - edit_corpus_file_form.booktitle.data = corpus_file.booktitle - edit_corpus_file_form.chapter.data = corpus_file.chapter - edit_corpus_file_form.editor.data = corpus_file.editor - edit_corpus_file_form.institution.data = corpus_file.institution - edit_corpus_file_form.journal.data = corpus_file.journal - edit_corpus_file_form.pages.data = corpus_file.pages - edit_corpus_file_form.publisher.data = corpus_file.publisher - edit_corpus_file_form.publishing_year.data = corpus_file.publishing_year - edit_corpus_file_form.school.data = corpus_file.school - edit_corpus_file_form.title.data = corpus_file.title - return render_template('corpora/corpus_file.html.j2', - corpus_file=corpus_file, corpus=corpus, - edit_corpus_file_form=edit_corpus_file_form, + form.address.data = corpus_file.address + form.author.data = corpus_file.author + form.booktitle.data = corpus_file.booktitle + form.chapter.data = corpus_file.chapter + form.editor.data = corpus_file.editor + form.institution.data = corpus_file.institution + form.journal.data = corpus_file.journal + form.pages.data = corpus_file.pages + form.publisher.data = corpus_file.publisher + form.publishing_year.data = corpus_file.publishing_year + form.school.data = corpus_file.school + form.title.data = corpus_file.title + return render_template('corpora/corpus_file.html.j2', corpus=corpus, + corpus_file=corpus_file, form=form, title='Edit corpus file') @@ -327,10 +292,10 @@ def prepare_corpus(corpus_id): abort(403) if corpus.files.all(): tasks.build_corpus(corpus_id) - flash('Building Corpus...', 'corpus') + flash('Corpus "{}" has been marked to get build!', 'corpus') else: - flash('Can not build corpus, please add corpus file(s).', 'corpus') - return redirect(url_for('corpora.corpus', corpus_id=corpus_id)) + flash('Can not build corpus "{}": No corpus file(s)!', 'error') + return redirect(url_for('.corpus', corpus_id=corpus_id)) # Following are view functions to add, view etc. exported results. @@ -340,35 +305,29 @@ def add_query_result(): ''' View to import a result as a json file. 
''' - add_query_result_form = AddQueryResultForm(prefix='add-query-result-form') - if add_query_result_form.is_submitted(): - if not add_query_result_form.validate(): - return make_response(add_query_result_form.errors, 400) - query_result = QueryResult( - creator=current_user, - description=add_query_result_form.description.data, - filename=add_query_result_form.file.data.filename, - title=add_query_result_form.title.data - ) + form = AddQueryResultForm(prefix='add-query-result-form') + if form.is_submitted(): + if not form.validate(): + return make_response(form.errors, 400) + query_result = QueryResult(creator=current_user, + description=form.description.data, + filename=form.file.data.filename, + title=form.title.data) db.session.add(query_result) db.session.commit() - # create paths to save the uploaded json file - query_result_dir = os.path.join(current_app.config['DATA_DIR'], - str(current_user.id), - 'query_results', - str(query_result.id)) try: - os.makedirs(query_result_dir) - except Exception: + os.makedirs(query_result.path) + except OSError: + logging.error('Make dir {} led to an OSError!'.format(query_result.path)) # noqa db.session.delete(query_result) db.session.commit() flash('Internal Server Error', 'error') - redirect_url = url_for('corpora.add_query_result') - return make_response({'redirect_url': redirect_url}, 500) + return make_response( + {'redirect_url': url_for('.add_query_result')}, 500) # save the uploaded file - query_result_file_path = os.path.join(query_result_dir, + query_result_file_path = os.path.join(query_result.path, query_result.filename) - add_query_result_form.file.data.save(query_result_file_path) + form.file.data.save(query_result_file_path) # parse json from file with open(query_result_file_path, 'r') as file: query_result_file_content = json.load(file) @@ -381,19 +340,16 @@ def add_query_result(): except Exception: tasks.delete_query_result(query_result.id) flash('Uploaded file is invalid', 'result') - redirect_url = url_for('corpora.add_query_result') - return make_response({'redirect_url': redirect_url}, 201) + return make_response( + {'redirect_url': url_for('.add_query_result')}, 201) query_result_file_content.pop('matches') query_result_file_content.pop('cpos_lookup') query_result.query_metadata = query_result_file_content db.session.commit() flash('Query result added!', 'result') - redirect_url = url_for('corpora.query_result', - query_result_id=query_result.id) - return make_response({'redirect_url': redirect_url}, 201) + return make_response({'redirect_url': url_for('.query_result', query_result_id=query_result.id)}, 201) # noqa return render_template('corpora/query_results/add_query_result.html.j2', - add_query_result_form=add_query_result_form, - title='Add query result') + form=form, title='Add query result') @corpora.route('/result/') @@ -404,8 +360,7 @@ def query_result(query_result_id): or current_user.is_administrator()): abort(403) return render_template('corpora/query_results/query_result.html.j2', - query_result=query_result, - title='Query result') + query_result=query_result, title='Query result') @corpora.route('/result//inspect') @@ -427,13 +382,7 @@ def inspect_query_result(query_result_id): inspect_display_options_form = InspectDisplayOptionsForm( prefix='inspect-display-options-form' ) - query_result_file_path = os.path.join( - current_app.config['DATA_DIR'], - str(current_user.id), - 'query_results', - str(query_result.id), - query_result.filename - ) + query_result_file_path = os.path.join(query_result.path, 
query_result.filename) # noqa with open(query_result_file_path, 'r') as query_result_file: query_result_file_content = json.load(query_result_file) return render_template('corpora/query_results/inspect.html.j2', @@ -452,8 +401,8 @@ def delete_query_result(query_result_id): if not (query_result.creator == current_user or current_user.is_administrator()): abort(403) + flash('Query result "{}" has been marked for deletion!'.format(query_result), 'result') # noqa tasks.delete_query_result(query_result_id) - flash('Query result deleted!', 'result') return redirect(url_for('services.service', service="corpus_analysis")) @@ -464,10 +413,5 @@ def download_query_result(query_result_id): if not (query_result.creator == current_user or current_user.is_administrator()): abort(403) - query_result_dir = os.path.join(current_app.config['DATA_DIR'], - str(current_user.id), - 'query_results', - str(query_result.id)) - return send_from_directory(as_attachment=True, - directory=query_result_dir, + return send_from_directory(as_attachment=True, directory=query_result.path, filename=query_result.filename) diff --git a/web/app/email.py b/web/app/email.py index 4969b05e..4d9f0036 100644 --- a/web/app/email.py +++ b/web/app/email.py @@ -1,11 +1,11 @@ -from flask import render_template +from flask import current_app, render_template from flask_mail import Message from . import mail from .decorators import background def create_message(recipient, subject, template, **kwargs): - msg = Message('[nopaque] {}'.format(subject), recipients=[recipient]) + msg = Message('{} {}'.format(current_app.config['NOPAQUE_MAIL_SUBJECT_PREFIX'], subject), recipients=[recipient]) # noqa msg.body = render_template('{}.txt.j2'.format(template), **kwargs) msg.html = render_template('{}.html.j2'.format(template), **kwargs) return msg diff --git a/web/app/jobs/__init__.py b/web/app/jobs/__init__.py index 43e34614..07e0e1bb 100644 --- a/web/app/jobs/__init__.py +++ b/web/app/jobs/__init__.py @@ -2,4 +2,4 @@ from flask import Blueprint jobs = Blueprint('jobs', __name__) -from . import views # noqa +from . import views diff --git a/web/app/jobs/views.py b/web/app/jobs/views.py index a92013f7..739f153c 100644 --- a/web/app/jobs/views.py +++ b/web/app/jobs/views.py @@ -1,11 +1,10 @@ -from flask import (abort, current_app, flash, redirect, render_template, +from flask import (abort, flash, redirect, render_template, send_from_directory, url_for) from flask_login import current_user, login_required from . import jobs from . 
import tasks from ..decorators import admin_required from ..models import Job, JobInput, JobResult -import os @jobs.route('/') @@ -14,13 +13,8 @@ def job(job_id): job = Job.query.get_or_404(job_id) if not (job.creator == current_user or current_user.is_administrator()): abort(403) - job_inputs = [dict(filename=input.filename, - id=input.id, - job_id=job.id) - for input in job.inputs] - return render_template('jobs/job.html.j2', - job=job, - job_inputs=job_inputs, + job_inputs = [job_input.to_dict() for job_input in job.inputs] + return render_template('jobs/job.html.j2', job=job, job_inputs=job_inputs, title='Job') @@ -31,7 +25,7 @@ def delete_job(job_id): if not (job.creator == current_user or current_user.is_administrator()): abort(403) tasks.delete_job(job_id) - flash('Job has been deleted!', 'job') + flash('Job has been marked for deletion!', 'job') return redirect(url_for('main.dashboard')) @@ -44,9 +38,8 @@ def download_job_input(job_id, job_input_id): if not (job_input.job.creator == current_user or current_user.is_administrator()): abort(403) - dir = os.path.join(current_app.config['DATA_DIR'], - job_input.dir) - return send_from_directory(as_attachment=True, directory=dir, + return send_from_directory(as_attachment=True, + directory=job_input.job.path, filename=job_input.filename) @@ -56,11 +49,11 @@ def download_job_input(job_id, job_input_id): def restart(job_id): job = Job.query.get_or_404(job_id) if job.status != 'failed': - flash('Could not restart job: status is not "failed"', 'error') + flash('Can not restart job "{}": Status is not "failed"'.format(job.title), 'error') # noqa else: tasks.restart_job(job_id) - flash('Job has been restarted!', 'job') - return redirect(url_for('jobs.job', job_id=job_id)) + flash('Job "{}" has been marked to get restarted!'.format(job.title), 'job') # noqa + return redirect(url_for('.job', job_id=job_id)) @jobs.route('//results//download') @@ -72,7 +65,6 @@ def download_job_result(job_id, job_result_id): if not (job_result.job.creator == current_user or current_user.is_administrator()): abort(403) - dir = os.path.join(current_app.config['DATA_DIR'], - job_result.dir) - return send_from_directory(as_attachment=True, directory=dir, + return send_from_directory(as_attachment=True, + directory=job_result.job.path, filename=job_result.filename) diff --git a/web/app/main/__init__.py b/web/app/main/__init__.py index 74430247..d658fca7 100644 --- a/web/app/main/__init__.py +++ b/web/app/main/__init__.py @@ -2,4 +2,4 @@ from flask import Blueprint main = Blueprint('main', __name__) -from . import views # noqa +from . 
import views diff --git a/web/app/main/views.py b/web/app/main/views.py index fcc0ed5d..6f3816de 100644 --- a/web/app/main/views.py +++ b/web/app/main/views.py @@ -7,17 +7,16 @@ from ..models import User @main.route('/', methods=['GET', 'POST']) def index(): - login_form = LoginForm(prefix='login-form') - if login_form.validate_on_submit(): - user = User.query.filter_by(username=login_form.user.data).first() + form = LoginForm(prefix='login-form') + if form.validate_on_submit(): + user = User.query.filter_by(username=form.user.data).first() if user is None: - user = User.query.filter_by(email=login_form.user.data).first() - if user is not None and user.verify_password(login_form.password.data): - login_user(user, login_form.remember_me.data) - return redirect(url_for('main.dashboard')) + user = User.query.filter_by(email=form.user.data.lower()).first() + if user is not None and user.verify_password(form.password.data): + login_user(user, form.remember_me.data) + return redirect(url_for('.dashboard')) flash('Invalid email/username or password.') - return render_template('main/index.html.j2', login_form=login_form, - title='nopaque') + return render_template('main/index.html.j2', form=form, title='nopaque') @main.route('/about_and_faq') @@ -31,7 +30,6 @@ def dashboard(): return render_template('main/dashboard.html.j2', title='Dashboard') - @main.route('/news') def news(): return render_template('main/news.html.j2', title='News') @@ -40,12 +38,9 @@ def news(): @main.route('/privacy_policy') def privacy_policy(): return render_template('main/privacy_policy.html.j2', - title=('Information on the processing of personal' - ' data for the nopaque platform (GDPR)')) + title='Privacy statement (GDPR)') @main.route('/terms_of_use') def terms_of_use(): - return render_template('main/terms_of_use.html.j2', - title='General Terms of Use of the platform ' - 'nopaque') + return render_template('main/terms_of_use.html.j2', title='Terms of Use') diff --git a/web/app/models.py b/web/app/models.py index 4f72960a..6af02a2e 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -7,6 +7,7 @@ from werkzeug.security import generate_password_hash, check_password_hash from werkzeug.utils import secure_filename import xml.etree.ElementTree as ET from . import db, login_manager +import logging import os import shutil @@ -54,7 +55,7 @@ class Role(db.Model): ''' String representation of the Role. For human readability. ''' - return ''.format(role_name=self.name) + return ''.format(self.name) def add_permission(self, perm): ''' @@ -138,6 +139,18 @@ class User(UserMixin, db.Model): cascade='save-update, merge, delete', lazy='dynamic') + @property + def path(self): + return os.path.join(current_app.config['NOPAQUE_DATA_DIR'], str(self.id)) + + @property + def password(self): + raise AttributeError('password is not a readable attribute') + + @password.setter + def password(self, password): + self.password_hash = generate_password_hash(password) + def to_dict(self): return {'id': self.id, 'role_id': self.role_id, @@ -162,7 +175,7 @@ class User(UserMixin, db.Model): ''' String representation of the User. For human readability. 
''' - return ''.format(username=self.username) + return ''.format(self.username) def __init__(self, **kwargs): super(User, self).__init__(**kwargs) @@ -220,14 +233,6 @@ class User(UserMixin, db.Model): db.session.add(user) return True - @property - def password(self): - raise AttributeError('password is not a readable attribute') - - @password.setter - def password(self, password): - self.password_hash = generate_password_hash(password) - def verify_password(self, password): return check_password_hash(self.password_hash, password) @@ -244,17 +249,11 @@ class User(UserMixin, db.Model): ''' return self.can(Permission.ADMIN) - def ping(self): - self.last_seen = datetime.utcnow() - db.session.add(self) - def delete(self): ''' Delete the user and its corpora and jobs from database and filesystem. ''' - user_dir = os.path.join(current_app.config['DATA_DIR'], - str(self.id)) - shutil.rmtree(user_dir, ignore_errors=True) + shutil.rmtree(self.path, ignore_errors=True) db.session.delete(self) @@ -280,14 +279,17 @@ class JobInput(db.Model): # Foreign keys job_id = db.Column(db.Integer, db.ForeignKey('jobs.id')) # Fields - dir = db.Column(db.String(255)) filename = db.Column(db.String(255)) + @property + def path(self): + return os.path.join(self.job.path, self.filename) + def __repr__(self): ''' String representation of the JobInput. For human readability. ''' - return ''.format(filename=self.filename) + return ''.format(self.filename) def to_dict(self): return {'id': self.id, @@ -305,14 +307,17 @@ class JobResult(db.Model): # Foreign keys job_id = db.Column(db.Integer, db.ForeignKey('jobs.id')) # Fields - dir = db.Column(db.String(255)) filename = db.Column(db.String(255)) + @property + def path(self): + return os.path.join(self.job.path, self.filename) + def __repr__(self): ''' String representation of the JobResult. For human readability. ''' - return ''.format(filename=self.filename) + return ''.format(self.filename) def to_dict(self): return {'id': self.id, @@ -351,19 +356,16 @@ class Job(db.Model): cascade='save-update, merge, delete') results = db.relationship('JobResult', backref='job', lazy='dynamic', cascade='save-update, merge, delete') - notification_data = db.relationship('NotificationData', - cascade='save-update, merge, delete', - uselist=False, - back_populates='job') # One-to-One relationship - notification_email_data = db.relationship('NotificationEmailData', - cascade='save-update, merge, delete', - back_populates='job') + + @property + def path(self): + return os.path.join(self.creator.path, 'jobs', str(self.id)) def __repr__(self): ''' String representation of the Job. For human readability. 
''' - return ''.format(job_title=self.title) + return ''.format(self.title) def create_secure_filename(self): ''' @@ -385,11 +387,7 @@ class Job(db.Model): db.session.commit() sleep(1) db.session.refresh(self) - job_dir = os.path.join(current_app.config['DATA_DIR'], - str(self.user_id), - 'jobs', - str(self.id)) - shutil.rmtree(job_dir, ignore_errors=True) + shutil.rmtree(self.path, ignore_errors=True) db.session.delete(self) def restart(self): @@ -399,12 +397,8 @@ class Job(db.Model): if self.status != 'failed': raise Exception('Could not restart job: status is not "failed"') - job_dir = os.path.join(current_app.config['DATA_DIR'], - str(self.user_id), - 'jobs', - str(self.id)) - shutil.rmtree(os.path.join(job_dir, 'output'), ignore_errors=True) - shutil.rmtree(os.path.join(job_dir, 'pyflow.data'), ignore_errors=True) + shutil.rmtree(os.path.join(self.path, 'output'), ignore_errors=True) + shutil.rmtree(os.path.join(self.path, 'pyflow.data'), ignore_errors=True) # noqa self.end_date = None self.status = 'submitted' @@ -425,63 +419,6 @@ class Job(db.Model): for result in self.results}} -class NotificationData(db.Model): - ''' - Class to define notification data used for sending a notification mail with - nopaque_notify. - ''' - __tablename__ = 'notification_data' - # Primary key - id = db.Column(db.Integer, primary_key=True) - # Foreign Key - job_id = db.Column(db.Integer, db.ForeignKey('jobs.id')) - # relationships - job = db.relationship('Job', back_populates='notification_data') - # Fields - notified_on = db.Column(db.String(16), default=None) - - def __repr__(self): - ''' - String representation of the NotificationData. For human readability. - ''' - return ''.format(id=self.id) - - def to_dict(self): - return {'id': self.id, - 'job_id': self.job_id, - 'job': self.job, - 'notified': self.notified} - - -class NotificationEmailData(db.Model): - ''' - Class to define data that will be used to send a corresponding Notification - via email. - ''' - __tablename__ = 'notification_email_data' - # Primary Key - id = db.Column(db.Integer, primary_key=True) - # Foreign Key - job_id = db.Column(db.Integer, db.ForeignKey('jobs.id')) - # relationships - job = db.relationship('Job', back_populates='notification_email_data') - notify_status = db.Column(db.String(16), default=None) - creation_date = db.Column(db.DateTime(), default=datetime.utcnow) - - def __repr__(self): - ''' - String representation of the NotificationEmailData. For human readability. - ''' - return ''.format(id=self.id) - - def to_dict(self): - return {'id': self.id, - 'job_id': self.job_id, - 'job': self.job, - 'notify_status': self.notify_status, - 'creation_date': self.creation_date} - - class CorpusFile(db.Model): ''' Class to define Files. 
@@ -496,7 +433,6 @@ class CorpusFile(db.Model): author = db.Column(db.String(255)) booktitle = db.Column(db.String(255)) chapter = db.Column(db.String(255)) - dir = db.Column(db.String(255)) editor = db.Column(db.String(255)) filename = db.Column(db.String(255)) institution = db.Column(db.String(255)) @@ -507,15 +443,15 @@ class CorpusFile(db.Model): school = db.Column(db.String(255)) title = db.Column(db.String(255)) + @property + def path(self): + return os.path.join(self.corpus.path, self.filename) + def delete(self): - corpus_file_path = os.path.join(current_app.config['DATA_DIR'], - str(self.corpus.user_id), - 'corpora', - str(self.corpus_id), - self.filename) try: - os.remove(corpus_file_path) + os.remove(self.path) except OSError: + logging.error('Removing {} led to an OSError!'.format(self.path)) pass db.session.delete(self) self.corpus.status = 'unprepared' @@ -553,13 +489,17 @@ class Corpus(db.Model): description = db.Column(db.String(255)) last_edited_date = db.Column(db.DateTime(), default=datetime.utcnow) max_nr_of_tokens = db.Column(db.BigInteger, default=2147483647) - status = db.Column(db.String(16)) + status = db.Column(db.String(16), default='unprepared') title = db.Column(db.String(32)) archive_file = db.Column(db.String(255)) # Relationships files = db.relationship('CorpusFile', backref='corpus', lazy='dynamic', cascade='save-update, merge, delete') + @property + def path(self): + return os.path.join(self.creator.path, 'corpora', str(self.id)) + def to_dict(self): return {'id': self.id, 'user_id': self.user_id, @@ -571,19 +511,14 @@ class Corpus(db.Model): 'files': {file.id: file.to_dict() for file in self.files}} def build(self): - corpus_dir = os.path.join(current_app.config['DATA_DIR'], - str(self.user_id), - 'corpora', - str(self.id)) - output_dir = os.path.join(corpus_dir, 'merged') + output_dir = os.path.join(self.path, 'merged') shutil.rmtree(output_dir, ignore_errors=True) os.mkdir(output_dir) master_element_tree = ET.ElementTree( ET.fromstring('\n') ) for corpus_file in self.files: - corpus_file_path = os.path.join(corpus_dir, corpus_file.filename) - element_tree = ET.parse(corpus_file_path) + element_tree = ET.parse(corpus_file.path) text_node = element_tree.find('text') text_node.set('address', corpus_file.address or "NULL") text_node.set('author', corpus_file.author) @@ -597,7 +532,7 @@ class Corpus(db.Model): text_node.set('publishing_year', str(corpus_file.publishing_year)) text_node.set('school', corpus_file.school or "NULL") text_node.set('title', corpus_file.title) - element_tree.write(corpus_file_path) + element_tree.write(corpus_file.path) master_element_tree.getroot().insert(1, text_node) output_file = os.path.join(output_dir, 'corpus.vrt') master_element_tree.write(output_file, @@ -607,18 +542,14 @@ class Corpus(db.Model): self.status = 'submitted' def delete(self): - corpus_dir = os.path.join(current_app.config['DATA_DIR'], - str(self.user_id), - 'corpora', - str(self.id)) - shutil.rmtree(corpus_dir, ignore_errors=True) + shutil.rmtree(self.path, ignore_errors=True) db.session.delete(self) def __repr__(self): ''' String representation of the corpus. For human readability. 
''' - return ''.format(corpus_title=self.title) + return ''.format(self.title) class QueryResult(db.Model): @@ -636,12 +567,12 @@ class QueryResult(db.Model): query_metadata = db.Column(db.JSON()) title = db.Column(db.String(32)) + @property + def path(self): + return os.path.join(self.creator.path, 'query_results', str(self.id)) + def delete(self): - query_result_dir = os.path.join(current_app.config['DATA_DIR'], - str(self.user_id), - 'query_results', - str(self.id)) - shutil.rmtree(query_result_dir, ignore_errors=True) + shutil.rmtree(self.path, ignore_errors=True) db.session.delete(self) def to_dict(self): @@ -654,7 +585,7 @@ class QueryResult(db.Model): def __repr__(self): ''' - String representation of the CorpusAnalysisResult. For human readability. + String representation of the QueryResult. For human readability. ''' return ''.format(self.title) diff --git a/web/app/query_results/views.py b/web/app/query_results/views.py deleted file mode 100644 index ff6eae5f..00000000 --- a/web/app/query_results/views.py +++ /dev/null @@ -1,150 +0,0 @@ -from . import query_results -from . import tasks -from .. import db -from ..corpora.forms import DisplayOptionsForm, InspectDisplayOptionsForm -from ..models import QueryResult -from .forms import AddQueryResultForm -from flask import (abort, current_app, flash, make_response, redirect, - render_template, request, send_from_directory, url_for) -from flask_login import current_user, login_required -import json -import os -from jsonschema import validate - - -@query_results.route('/add', methods=['GET', 'POST']) -@login_required -def add_query_result(): - ''' - View to import a result as a json file. - ''' - add_query_result_form = AddQueryResultForm(prefix='add-query-result-form') - if add_query_result_form.is_submitted(): - if not add_query_result_form.validate(): - return make_response(add_query_result_form.errors, 400) - query_result = QueryResult( - creator=current_user, - description=add_query_result_form.description.data, - filename=add_query_result_form.file.data.filename, - title=add_query_result_form.title.data - ) - db.session.add(query_result) - db.session.commit() - # create paths to save the uploaded json file - query_result_dir = os.path.join(current_app.config['DATA_DIR'], - str(current_user.id), - 'query_results', - str(query_result.id)) - try: - os.makedirs(query_result_dir) - except Exception: - db.session.delete(query_result) - db.session.commit() - flash('Internal Server Error', 'error') - redirect_url = url_for('query_results.add_query_result') - return make_response({'redirect_url': redirect_url}, 500) - # save the uploaded file - query_result_file_path = os.path.join(query_result_dir, - query_result.filename) - add_query_result_form.file.data.save(query_result_file_path) - # parse json from file - with open(query_result_file_path, 'r') as file: - query_result_file_content = json.load(file) - # parse json schema - with open('app/static/json_schema/nopaque_cqi_py_results_schema.json', 'r') as file: # noqa - schema = json.load(file) - try: - # validate imported json file - validate(instance=query_result_file_content, schema=schema) - except Exception: - tasks.delete_query_result(query_result.id) - flash('Uploaded file is invalid', 'result') - redirect_url = url_for('query_results.add_query_result') - return make_response({'redirect_url': redirect_url}, 201) - query_result_file_content.pop('matches') - query_result_file_content.pop('cpos_lookup') - query_result.query_metadata = query_result_file_content - db.session.commit() - 
flash('Query result added!', 'result') - redirect_url = url_for('query_results.query_result', - query_result_id=query_result.id) - return make_response({'redirect_url': redirect_url}, 201) - return render_template('corpora/query_results/add_query_result.html.j2', - add_query_result_form=add_query_result_form, - title='Add query result') - - -@query_results.route('/') -@login_required -def query_result(query_result_id): - query_result = QueryResult.query.get_or_404(query_result_id) - if not (query_result.creator == current_user - or current_user.is_administrator()): - abort(403) - return render_template('corpora/query_results/query_result.html.j2', - query_result=query_result, - title='Query result') - - -@query_results.route('//inspect') -@login_required -def inspect_query_result(query_result_id): - ''' - View to inspect imported result file in a corpus analysis like interface - ''' - query_result = QueryResult.query.get_or_404(query_result_id) - query_metadata = query_result.query_metadata - if not (query_result.creator == current_user - or current_user.is_administrator()): - abort(403) - display_options_form = DisplayOptionsForm( - prefix='display-options-form', - results_per_page=request.args.get('results_per_page', 30), - result_context=request.args.get('context', 20) - ) - inspect_display_options_form = InspectDisplayOptionsForm( - prefix='inspect-display-options-form' - ) - query_result_file_path = os.path.join( - current_app.config['DATA_DIR'], - str(current_user.id), - 'query_results', - str(query_result.id), - query_result.filename - ) - with open(query_result_file_path, 'r') as query_result_file: - query_result_file_content = json.load(query_result_file) - return render_template('corpora/query_results/inspect.html.j2', - display_options_form=display_options_form, - inspect_display_options_form=inspect_display_options_form, - query_result_file_content=query_result_file_content, - query_metadata=query_metadata, - title='Inspect query result') - - -@query_results.route('//delete') -@login_required -def delete_query_result(query_result_id): - query_result = QueryResult.query.get_or_404(query_result_id) - if not (query_result.creator == current_user - or current_user.is_administrator()): - abort(403) - tasks.delete_query_result(query_result_id) - flash('Query result deleted!', 'result') - return redirect(url_for('services.service', service="corpus_analysis")) - - -@query_results.route('//download') -@login_required -def download_query_result(query_result_id): - query_result = QueryResult.query.get_or_404(query_result_id) - if not (query_result.creator == current_user - or current_user.is_administrator()): - abort(403) - query_result_dir = os.path.join(current_app.config['DATA_DIR'], - str(current_user.id), - 'query_results', - str(query_result.id)) - return send_from_directory(as_attachment=True, - directory=query_result_dir, - filename=query_result.filename) diff --git a/web/app/services/__init__.py b/web/app/services/__init__.py index 0bc0cfb2..ea9a403f 100644 --- a/web/app/services/__init__.py +++ b/web/app/services/__init__.py @@ -2,4 +2,4 @@ from flask import Blueprint services = Blueprint('services', __name__) -from . import views # noqa +from . 
import views diff --git a/web/app/services/views.py b/web/app/services/views.py index 6fbf2ef0..a6567985 100644 --- a/web/app/services/views.py +++ b/web/app/services/views.py @@ -1,5 +1,4 @@ -from flask import (abort, current_app, flash, make_response, render_template, - url_for) +from flask import abort, flash, make_response, render_template, url_for from flask_login import current_user, login_required from werkzeug.utils import secure_filename from . import services @@ -7,19 +6,20 @@ from .. import db from ..jobs.forms import AddFileSetupJobForm, AddNLPJobForm, AddOCRJobForm from ..models import Job, JobInput import json +import logging import os SERVICES = {'corpus_analysis': {'name': 'Corpus analysis'}, 'file-setup': {'name': 'File setup', 'resources': {'mem_mb': 4096, 'n_cores': 4}, - 'add_job_form': AddFileSetupJobForm}, + 'form': AddFileSetupJobForm}, 'nlp': {'name': 'Natural Language Processing', 'resources': {'mem_mb': 4096, 'n_cores': 2}, - 'add_job_form': AddNLPJobForm}, + 'form': AddNLPJobForm}, 'ocr': {'name': 'Optical Character Recognition', 'resources': {'mem_mb': 8192, 'n_cores': 4}, - 'add_job_form': AddOCRJobForm}} + 'form': AddOCRJobForm}} @services.route('/', methods=['GET', 'POST']) @@ -30,54 +30,49 @@ def service(service): if service == 'corpus_analysis': return render_template('services/{}.html.j2'.format(service), title=SERVICES[service]['name']) - add_job_form = SERVICES[service]['add_job_form'](prefix='add-job-form') - if add_job_form.is_submitted(): - if not add_job_form.validate(): - return make_response(add_job_form.errors, 400) + form = SERVICES[service]['form'](prefix='add-job-form') + if form.is_submitted(): + if not form.validate(): + return make_response(form.errors, 400) service_args = [] if service == 'nlp': - service_args.append('-l {}'.format(add_job_form.language.data)) - if add_job_form.check_encoding.data: + service_args.append('-l {}'.format(form.language.data)) + if form.check_encoding.data: service_args.append('--check-encoding') if service == 'ocr': - service_args.append('-l {}'.format(add_job_form.language.data)) - if add_job_form.binarization.data: + service_args.append('-l {}'.format(form.language.data)) + if form.binarization.data: service_args.append('--binarize') job = Job(creator=current_user, - description=add_job_form.description.data, + description=form.description.data, mem_mb=SERVICES[service]['resources']['mem_mb'], n_cores=SERVICES[service]['resources']['n_cores'], service=service, service_args=json.dumps(service_args), - service_version=add_job_form.version.data, - status='preparing', title=add_job_form.title.data) + service_version=form.version.data, + status='preparing', title=form.title.data) if job.service != 'corpus_analysis': job.create_secure_filename() db.session.add(job) db.session.commit() - relative_dir = os.path.join(str(job.user_id), 'jobs', str(job.id)) - absolut_dir = os.path.join(current_app.config['DATA_DIR'], - relative_dir) try: - os.makedirs(absolut_dir) + os.makedirs(job.path) except OSError: - job.delete() - flash('Internal Server Error', 'job') - return make_response({'redirect_url': url_for('services.service', - service=service)}, - 500) + logging.error('Make dir {} led to an OSError!'.format(job.path)) + db.session.delete(job) + db.session.commit() + flash('Internal Server Error', 'error') + return make_response( + {'redirect_url': url_for('.service', service=service)}, 500) else: - for file in add_job_form.files.data: + for file in form.files.data: filename = secure_filename(file.filename) - 
file.save(os.path.join(absolut_dir, filename)) - job_input = JobInput(dir=relative_dir, filename=filename, - job=job) + job_input = JobInput(dir=job.path, filename=filename, job=job) + file.save(job_input.path) db.session.add(job_input) job.status = 'submitted' db.session.commit() - url = url_for('jobs.job', job_id=job.id) - flash('[{}] added'.format(url, job.title), 'job') + flash('Job "{}" added'.format(job.title), 'job') return make_response( {'redirect_url': url_for('jobs.job', job_id=job.id)}, 201) return render_template('services/{}.html.j2'.format(service), - title=SERVICES[service]['name'], - add_job_form=add_job_form) + form=form, title=SERVICES[service]['name']) diff --git a/web/app/settings/forms.py b/web/app/settings/forms.py index 6f7abeef..5c822fd9 100644 --- a/web/app/settings/forms.py +++ b/web/app/settings/forms.py @@ -35,7 +35,7 @@ class EditGeneralSettingsForm(FlaskForm): 'Benutzername', validators=[DataRequired(), Length(1, 64), - Regexp(current_app.config['ALLOWED_USERNAME_REGEX'], + Regexp(current_app.config['NOPAQUE_USERNAME_REGEX'], message='Usernames must have only letters, numbers,' ' dots or underscores')] ) diff --git a/web/app/settings/views.py b/web/app/settings/views.py index 1bd4a07f..a7fc0b38 100644 --- a/web/app/settings/views.py +++ b/web/app/settings/views.py @@ -1,13 +1,9 @@ -from flask import current_app, flash, redirect, render_template, url_for +from flask import flash, redirect, render_template, url_for from flask_login import current_user, login_required, logout_user from . import settings, tasks from .forms import (ChangePasswordForm, EditGeneralSettingsForm, EditNotificationSettingsForm) from .. import db -from ..decorators import admin_required -from ..models import Role, User -import os -import uuid @settings.route('/') @@ -26,8 +22,7 @@ def change_password(): flash('Your password has been updated.') return redirect(url_for('.change_password')) return render_template('settings/change_password.html.j2', - form=form, - title='Change password') + form=form, title='Change password') @settings.route('/edit_general_settings', methods=['GET', 'POST']) @@ -40,12 +35,12 @@ def edit_general_settings(): current_user.username = form.username.data db.session.commit() flash('Your changes have been saved.') + return redirect(url_for('.edit_general_settings')) form.dark_mode.data = current_user.setting_dark_mode form.email.data = current_user.email form.username.data = current_user.username return render_template('settings/edit_general_settings.html.j2', - form=form, - title='General settings') + form=form, title='General settings') @settings.route('/edit_notification_settings', methods=['GET', 'POST']) @@ -59,13 +54,13 @@ def edit_notification_settings(): form.job_status_site_notifications.data db.session.commit() flash('Your changes have been saved.') + return redirect(url_for('.edit_notification_settings')) form.job_status_mail_notifications.data = \ current_user.setting_job_status_mail_notifications form.job_status_site_notifications.data = \ current_user.setting_job_status_site_notifications return render_template('settings/edit_notification_settings.html.j2', - form=form, - title='Notification settings') + form=form, title='Notification settings') @settings.route('/delete') @@ -76,5 +71,5 @@ def delete(): """ tasks.delete_user(current_user.id) logout_user() - flash('Your account has been deleted!') + flash('Your account has been marked for deletion!') return redirect(url_for('main.index')) diff --git a/web/app/tasks/__init__.py 
b/web/app/tasks/__init__.py index ba33a1fe..9bd21af6 100644 --- a/web/app/tasks/__init__.py +++ b/web/app/tasks/__init__.py @@ -11,15 +11,11 @@ def check_corpora(): corpora = Corpus.query.all() for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): corpus_utils.create_build_corpus_service(corpus) - for corpus in filter(lambda corpus: (corpus.status == 'queued' - or corpus.status == 'running'), - corpora): + for corpus in filter(lambda corpus: corpus.status in ['queued', 'running'], corpora): # noqa corpus_utils.checkout_build_corpus_service(corpus) - for corpus in filter(lambda corpus: corpus.status == 'start analysis', - corpora): + for corpus in filter(lambda corpus: corpus.status == 'start analysis', corpora): # noqa corpus_utils.create_cqpserver_container(corpus) - for corpus in filter(lambda corpus: corpus.status == 'stop analysis', - corpora): + for corpus in filter(lambda corpus: corpus.status == 'stop analysis', corpora): # noqa corpus_utils.remove_cqpserver_container(corpus) db.session.commit() @@ -28,8 +24,6 @@ def check_jobs(): jobs = Job.query.all() for job in filter(lambda job: job.status == 'submitted', jobs): job_utils.create_job_service(job) - for job in filter(lambda job: job.status == 'queued', jobs): - job_utils.checkout_job_service(job) - for job in filter(lambda job: job.status == 'running', jobs): + for job in filter(lambda job: job.status in ['queued', 'running'], jobs): job_utils.checkout_job_service(job) db.session.commit() diff --git a/web/app/tasks/corpus_utils.py b/web/app/tasks/corpus_utils.py index c06b19ac..dd37ad62 100644 --- a/web/app/tasks/corpus_utils.py +++ b/web/app/tasks/corpus_utils.py @@ -1,4 +1,3 @@ -from flask import current_app from . import docker_client import docker import logging @@ -7,20 +6,14 @@ import shutil def create_build_corpus_service(corpus): - corpus_dir = os.path.join(current_app.config['DATA_DIR'], - str(corpus.user_id), - 'corpora', - str(corpus.id)) - corpus_data_dir = os.path.join(corpus_dir, 'data') - corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt') - corpus_registry_dir = os.path.join(corpus_dir, 'registry') - if os.path.exists(corpus_data_dir): - shutil.rmtree(corpus_data_dir) - if os.path.exists(corpus_registry_dir): - shutil.rmtree(corpus_registry_dir) + corpus_data_dir = os.path.join(corpus.path, 'data') + shutil.rmtree(corpus_data_dir, ignore_errors=True) os.mkdir(corpus_data_dir) + corpus_registry_dir = os.path.join(corpus.path, 'registry') + shutil.rmtree(corpus_registry_dir, ignore_errors=True) os.mkdir(corpus_registry_dir) - service_args = { + corpus_file = os.path.join(corpus.path, 'merged', 'corpus.vrt') + service_kwargs = { 'command': 'docker-entrypoint.sh build-corpus', 'constraints': ['node.role==worker'], 'labels': {'origin': 'nopaque', @@ -32,30 +25,34 @@ def create_build_corpus_service(corpus): 'name': 'build-corpus_{}'.format(corpus.id), 'restart_policy': docker.types.RestartPolicy() } - service_image = \ - 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' + service_image = 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' # noqa try: - docker_client.services.create(service_image, **service_args) + docker_client.services.create(service_image, **service_kwargs) except docker.errors.APIError as e: - logging.error('create_build_corpus_service({}): '.format(corpus.id) - + '{} (status: {} -> failed)'.format(e, corpus.status)) - corpus.status = 'failed' + logging.error('Create "{}" service raised '.format(service_kwargs['name']) # noqa + + 
'[docker-APIError] The server returned an error. ' + + 'Details: {}'.format(e)) else: corpus.status = 'queued' - finally: - # TODO: send email - pass def checkout_build_corpus_service(corpus): service_name = 'build-corpus_{}'.format(corpus.id) try: service = docker_client.services.get(service_name) - except docker.errors.NotFound as e: - logging.error('checkout_build_corpus_service({}):'.format(corpus.id) - + ' {} (stauts: {} -> failed)'.format(e, corpus.status)) + except docker.errors.NotFound: + logging.error('Get "{}" service raised '.format(service_name) + + '[docker-NotFound] The service does not exist. ' + + '(corpus.status: {} -> failed)'.format(corpus.status)) corpus.status = 'failed' - # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion + except docker.errors.APIError as e: + logging.error('Get "{}" service raised '.format(service_name) + + '[docker-APIError] The server returned an error. ' + + 'Details: {}'.format(e)) + except docker.errors.InvalidVersion: + logging.error('Get "{}" service raised '.format(service_name) + + '[docker-InvalidVersion] One of the arguments is ' + + 'not supported with the current API version.') else: service_tasks = service.tasks() if not service_tasks: @@ -63,25 +60,23 @@ def checkout_build_corpus_service(corpus): task_state = service_tasks[0].get('Status').get('State') if corpus.status == 'queued' and task_state != 'pending': corpus.status = 'running' - elif corpus.status == 'running' and task_state == 'complete': - service.remove() - corpus.status = 'prepared' - elif corpus.status == 'running' and task_state == 'failed': - service.remove() - corpus.status = task_state - finally: - # TODO: send email - pass + elif corpus.status == 'running' and task_state in ['complete', 'failed']: # noqa + try: + service.remove() + except docker.errors.APIError as e: + logging.error('Remove "{}" service raised '.format(service_name) # noqa + + '[docker-APIError] The server returned an error. ' # noqa + + 'Details: {}'.format(e)) + return + else: + corpus.status = 'prepared' if task_state == 'complete' \ + else 'failed' def create_cqpserver_container(corpus): - corpus_dir = os.path.join(current_app.config['DATA_DIR'], - str(corpus.user_id), - 'corpora', - str(corpus.id)) - corpus_data_dir = os.path.join(corpus_dir, 'data') - corpus_registry_dir = os.path.join(corpus_dir, 'registry') - container_args = { + corpus_data_dir = os.path.join(corpus.path, 'data') + corpus_registry_dir = os.path.join(corpus.path, 'registry') + container_kwargs = { 'command': 'cqpserver', 'detach': True, 'volumes': [corpus_data_dir + ':/corpora/data:rw', @@ -89,20 +84,43 @@ def create_cqpserver_container(corpus): 'name': 'cqpserver_{}'.format(corpus.id), 'network': 'nopaque_default' } - container_image = \ - 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' + container_image = 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' # noqa + # Check if a cqpserver container already exists. If this is the case, + # remove it and create a new one try: - container = docker_client.containers.get(container_args['name']) + container = docker_client.containers.get(container_kwargs['name']) except docker.errors.NotFound: pass - except docker.errors.DockerException: + except docker.errors.APIError as e: + logging.error('Get "{}" container raised '.format(container_kwargs['name']) + + '[docker-APIError] The server returned an error. 
' + + 'Details: {}'.format(e)) return else: - container.remove(force=True) + try: + container.remove(force=True) + except docker.errors.APIError as e: + logging.error('Remove "{}" container raised '.format(container_kwargs['name']) + + '[docker-APIError] The server returned an error. ' + + 'Details: {}'.format(e)) + return try: - docker_client.containers.run(container_image, **container_args) - except docker.errors.DockerException: - return + docker_client.containers.run(container_image, **container_kwargs) + except docker.errors.ContainerError: + # This case should not occur, because detach is True. + logging.error('Run "{}" container raised '.format(container_kwargs['name']) + + '[docker-ContainerError] The container exits with a ' + + 'non-zero exit code and detach is False.') + corpus.status = 'failed' + except docker.errors.ImageNotFound: + logging.error('Run "{}" container raised '.format(container_kwargs['name']) + + '[docker-ImageNotFound] The specified image does not ' + + 'exist.') + corpus.status = 'failed' + except docker.errors.APIError as e: + logging.error('Run "{}" container raised '.format(container_kwargs['name']) + + '[docker-APIError] The server returned an error. ' + + 'Details: {}'.format(e)) else: corpus.status = 'analysing' @@ -113,8 +131,17 @@ def remove_cqpserver_container(corpus): container = docker_client.containers.get(container_name) except docker.errors.NotFound: pass - except docker.errors.DockerException: + except docker.errors.APIError as e: + logging.error('Get "{}" container raised '.format(container_name) + + '[docker-APIError] The server returned an error. ' + + 'Details: {}'.format(e)) return else: - container.remove(force=True) + try: + container.remove(force=True) + except docker.errors.APIError as e: + logging.error('Remove "{}" container raised '.format(container_name) + + '[docker-APIError] The server returned an error. ' + + 'Details: {}'.format(e)) + return corpus.status = 'prepared' diff --git a/web/app/tasks/job_utils.py b/web/app/tasks/job_utils.py index 2094e7cd..68db9507 100644 --- a/web/app/tasks/job_utils.py +++ b/web/app/tasks/job_utils.py @@ -1,7 +1,7 @@ from datetime import datetime -from flask import current_app from . import docker_client from .. 
import db +from ..email import create_message, send from ..models import JobResult import docker import logging @@ -10,51 +10,60 @@ import os def create_job_service(job): - job_dir = os.path.join(current_app.config['DATA_DIR'], - str(job.user_id), - 'jobs', - str(job.id)) cmd = '{} -i /files -o /files/output'.format(job.service) if job.service == 'file-setup': cmd += ' -f {}'.format(job.secure_filename) cmd += ' --log-dir /files' cmd += ' --zip [{}]_{}'.format(job.service, job.secure_filename) cmd += ' ' + ' '.join(json.loads(job.service_args)) - service_args = {'command': cmd, - 'constraints': ['node.role==worker'], - 'labels': {'origin': 'nopaque', - 'type': 'service.{}'.format(job.service), - 'job_id': str(job.id)}, - 'mounts': [job_dir + ':/files:rw'], - 'name': 'job_{}'.format(job.id), - 'resources': docker.types.Resources( - cpu_reservation=job.n_cores * (10 ** 9), - mem_reservation=job.mem_mb * (10 ** 6)), - 'restart_policy': docker.types.RestartPolicy()} + service_kwargs = {'command': cmd, + 'constraints': ['node.role==worker'], + 'labels': {'origin': 'nopaque', + 'type': 'service.{}'.format(job.service), + 'job_id': str(job.id)}, + 'mounts': [job.path + ':/files:rw'], + 'name': 'job_{}'.format(job.id), + 'resources': docker.types.Resources( + cpu_reservation=job.n_cores * (10 ** 9), + mem_reservation=job.mem_mb * (10 ** 6) + ), + 'restart_policy': docker.types.RestartPolicy()} service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/' + job.service + ':' + job.service_version) try: - docker_client.services.create(service_image, **service_args) + docker_client.services.create(service_image, **service_kwargs) except docker.errors.APIError as e: - logging.error('create_job_service({}): {} '.format(job.id, e) - + '(status: {} -> failed)'.format(job.status)) - job.status = 'failed' + logging.error('Create "{}" service raised '.format(service_kwargs['name']) # noqa + + '[docker-APIError] The server returned an error. ' + + 'Details: {}'.format(e)) else: job.status = 'queued' - finally: - # TODO: send email - pass + msg = create_message( + job.creator.email, + 'Status update for your Job "{}"'.format(job.title), + 'tasks/email/notification', + job=job + ) + send(msg) def checkout_job_service(job): service_name = 'job_{}'.format(job.id) try: service = docker_client.services.get(service_name) - except docker.errors.NotFound as e: - logging.error('checkout_job_service({}): {} '.format(job.id, e) - + '(status: {} -> submitted)'.format(job.status)) - job.status = 'submitted' - # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion + except docker.errors.NotFound: + logging.error('Get "{}" service raised '.format(service_name) + + '[docker-NotFound] The service does not exist. ' + + '(job.status: {} -> failed)'.format(job.status)) + job.status = 'failed' + except docker.errors.APIError as e: + logging.error('Get "{}" service raised '.format(service_name) + + '[docker-APIError] The server returned an error. 
' + + 'Details: {}'.format(e)) + except docker.errors.InvalidVersion: + logging.error('Get "{}" service raised '.format(service_name) + + '[docker-InvalidVersion] One of the arguments is ' + + 'not supported with the current API version.') else: service_tasks = service.tasks() if not service_tasks: @@ -62,22 +71,16 @@ def checkout_job_service(job): task_state = service_tasks[0].get('Status').get('State') if job.status == 'queued' and task_state != 'pending': job.status = 'running' - elif job.status == 'queued' and task_state == 'complete': + elif job.status == 'running' and task_state == 'complete': service.remove() job.end_date = datetime.utcnow() job.status = task_state if task_state == 'complete': - results_dir = os.path.join(current_app.config['DATA_DIR'], - str(job.user_id), - 'jobs', - str(job.id), - 'output') - results = filter(lambda x: x.endswith('.zip'), - os.listdir(results_dir)) - for result in results: - job_result = JobResult(dir=results_dir, - filename=result, - job_id=job.id) + job_results_dir = os.path.join(job.path, 'output') + job_results = filter(lambda x: x.endswith('.zip'), + os.listdir(job_results_dir)) + for job_result in job_results: + job_result = JobResult(filename=job_result, job=job) db.session.add(job_result) elif job.status == 'running' and task_state == 'failed': service.remove() @@ -85,6 +88,13 @@ def checkout_job_service(job): job.status = task_state finally: # TODO: send email + msg = create_message( + job.creator.email, + '[nopaque] Status update for your Job "{}"'.format(job.title), + 'tasks/email/notification', + job=job + ) + send(msg) pass diff --git a/web/app/templates/auth/login.html.j2 b/web/app/templates/auth/login.html.j2 index fa99f31a..db83e8b0 100644 --- a/web/app/templates/auth/login.html.j2 +++ b/web/app/templates/auth/login.html.j2 @@ -35,20 +35,20 @@
- {{ login_form.hidden_tag() }} - {{ wtf.render_field(login_form.user, material_icon='person') }} - {{ wtf.render_field(login_form.password, material_icon='vpn_key') }} + {{ form.hidden_tag() }} + {{ wtf.render_field(form.user, material_icon='person') }} + {{ wtf.render_field(form.password, material_icon='vpn_key') }}
- {{ wtf.render_field(login_form.remember_me) }} + {{ wtf.render_field(form.remember_me) }}
- {{ wtf.render_field(login_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
diff --git a/web/app/templates/auth/register.html.j2 b/web/app/templates/auth/register.html.j2 index e41990b3..7b8db08d 100644 --- a/web/app/templates/auth/register.html.j2 +++ b/web/app/templates/auth/register.html.j2 @@ -34,14 +34,14 @@
- {{ registration_form.hidden_tag() }} - {{ wtf.render_field(registration_form.username, data_length='64', material_icon='person') }} - {{ wtf.render_field(registration_form.password, data_length='128', material_icon='vpn_key') }} - {{ wtf.render_field(registration_form.password_confirmation, data_length='128', material_icon='vpn_key') }} - {{ wtf.render_field(registration_form.email, class_='validate', material_icon='email', type='email') }} + {{ form.hidden_tag() }} + {{ wtf.render_field(form.username, data_length='64', material_icon='person') }} + {{ wtf.render_field(form.password, data_length='128', material_icon='vpn_key') }} + {{ wtf.render_field(form.password_confirmation, data_length='128', material_icon='vpn_key') }} + {{ wtf.render_field(form.email, class_='validate', material_icon='email', type='email') }}
- {{ wtf.render_field(registration_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
diff --git a/web/app/templates/auth/reset_password.html.j2 b/web/app/templates/auth/reset_password.html.j2 index 21da27db..4002c158 100644 --- a/web/app/templates/auth/reset_password.html.j2 +++ b/web/app/templates/auth/reset_password.html.j2 @@ -20,12 +20,12 @@
- {{ reset_password_form.hidden_tag() }} - {{ wtf.render_field(reset_password_form.password, data_length='128') }} - {{ wtf.render_field(reset_password_form.password_confirmation, data_length='128') }} + {{ form.hidden_tag() }} + {{ wtf.render_field(form.password, data_length='128') }} + {{ wtf.render_field(form.password_confirmation, data_length='128') }}
- {{ wtf.render_field(reset_password_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
diff --git a/web/app/templates/auth/reset_password_request.html.j2 b/web/app/templates/auth/reset_password_request.html.j2 index 0a2baf9f..07a0808e 100644 --- a/web/app/templates/auth/reset_password_request.html.j2 +++ b/web/app/templates/auth/reset_password_request.html.j2 @@ -20,11 +20,11 @@
- {{ reset_password_request_form.hidden_tag() }} - {{ wtf.render_field(reset_password_request_form.email, class_='validate', material_icon='email', type='email') }} + {{ form.hidden_tag() }} + {{ wtf.render_field(form.email, class_='validate', material_icon='email', type='email') }}
- {{ wtf.render_field(reset_password_request_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
diff --git a/web/app/templates/corpora/add_corpus.html.j2 b/web/app/templates/corpora/add_corpus.html.j2 index 219b098f..c5e5e3b9 100644 --- a/web/app/templates/corpora/add_corpus.html.j2 +++ b/web/app/templates/corpora/add_corpus.html.j2 @@ -27,18 +27,18 @@
- {{ add_corpus_form.hidden_tag() }} + {{ form.hidden_tag() }}
- {{ wtf.render_field(add_corpus_form.title, data_length='32', material_icon='title') }} + {{ wtf.render_field(form.title, data_length='32', material_icon='title') }}
- {{ wtf.render_field(add_corpus_form.description, data_length='255', material_icon='description') }} + {{ wtf.render_field(form.description, data_length='255', material_icon='description') }}
- {{ wtf.render_field(add_corpus_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
diff --git a/web/app/templates/corpora/add_corpus_file.html.j2 b/web/app/templates/corpora/add_corpus_file.html.j2 index 4ca54776..14b09304 100644 --- a/web/app/templates/corpora/add_corpus_file.html.j2 +++ b/web/app/templates/corpora/add_corpus_file.html.j2 @@ -27,24 +27,24 @@
- {{ add_corpus_file_form.hidden_tag() }} + {{ form.hidden_tag() }}
- {{ wtf.render_field(add_corpus_file_form.author, data_length='255', material_icon='person') }} + {{ wtf.render_field(form.author, data_length='255', material_icon='person') }}
- {{ wtf.render_field(add_corpus_file_form.title, data_length='255', material_icon='title') }} + {{ wtf.render_field(form.title, data_length='255', material_icon='title') }}
- {{ wtf.render_field(add_corpus_file_form.publishing_year, material_icon='access_time') }} + {{ wtf.render_field(form.publishing_year, material_icon='access_time') }}
- {{ wtf.render_field(add_corpus_file_form.file, accept='.vrt', placeholder='Choose your .vrt file') }} + {{ wtf.render_field(form.file, accept='.vrt', placeholder='Choose your .vrt file') }}
- {{ wtf.render_field(add_corpus_file_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}

@@ -52,7 +52,7 @@
 Add additional metadata
    - {% for field in add_corpus_file_form + {% for field in form if field.short_name not in ['author', 'csrf_token', 'file', 'publishing_year', 'submit', 'title'] %} {{ wtf.render_field(field, data_length='255', material_icon=field.label.text[0:1]) }} {% endfor %} diff --git a/web/app/templates/corpora/analyse_corpus.html.j2 b/web/app/templates/corpora/analyse_corpus.html.j2 index 29dba027..af44f86f 100644 --- a/web/app/templates/corpora/analyse_corpus.html.j2 +++ b/web/app/templates/corpora/analyse_corpus.html.j2 @@ -155,7 +155,7 @@ import { */ document.addEventListener("DOMContentLoaded", () => { // Initialize the client for server client communication in dynamic mode - let corpusId = {{ corpus_id }} + let corpusId = {{ corpus.id }} const client = new Client({'corpusId': corpusId, 'socket': nopaque.socket, 'logging': true, diff --git a/web/app/templates/corpora/corpus_file.html.j2 b/web/app/templates/corpora/corpus_file.html.j2 index 7548604b..d8022303 100644 --- a/web/app/templates/corpora/corpus_file.html.j2 +++ b/web/app/templates/corpora/corpus_file.html.j2 @@ -20,23 +20,23 @@
    - {{ edit_corpus_file_form.hidden_tag() }} + {{ form.hidden_tag() }}
    - {{ wtf.render_field(edit_corpus_file_form.author, data_length='255', material_icon='person') }} + {{ wtf.render_field(form.author, data_length='255', material_icon='person') }}
    - {{ wtf.render_field(edit_corpus_file_form.title, data_length='255', material_icon='title') }} + {{ wtf.render_field(form.title, data_length='255', material_icon='title') }}
    - {{ wtf.render_field(edit_corpus_file_form.publishing_year, material_icon='access_time') }} + {{ wtf.render_field(form.publishing_year, material_icon='access_time') }}
    - {{ wtf.render_field(edit_corpus_file_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}

    @@ -44,7 +44,7 @@
 Edit additional metadata
    - {% for field in edit_corpus_file_form + {% for field in form if field.short_name not in ['author', 'csrf_token', 'publishing_year', 'submit', 'title'] %} {{ wtf.render_field(field, data_length='255', material_icon=field.label.text[0:1]) }} {% endfor %} diff --git a/web/app/templates/corpora/import_corpus.html.j2 b/web/app/templates/corpora/import_corpus.html.j2 index 0bc47d8e..1a8caf08 100644 --- a/web/app/templates/corpora/import_corpus.html.j2 +++ b/web/app/templates/corpora/import_corpus.html.j2 @@ -1,4 +1,4 @@ -{% extends "nopaque.html.j2" %} + {% extends "nopaque.html.j2" %} {% from '_colors.html.j2' import colors %} {% import 'materialize/wtf.html.j2' as wtf %} @@ -27,23 +27,23 @@
    - {{ import_corpus_form.hidden_tag() }} + {{ form.hidden_tag() }}
    - {{ wtf.render_field(import_corpus_form.title, data_length='32', material_icon='title') }} + {{ wtf.render_field(form.title, data_length='32', material_icon='title') }}
    - {{ wtf.render_field(import_corpus_form.description, data_length='255', material_icon='description') }} + {{ wtf.render_field(form.description, data_length='255', material_icon='description') }}
    - {{ wtf.render_field(import_corpus_form.file, accept='.zip', placeholder='Choose your exported .zip file') }} + {{ wtf.render_field(form.file, accept='.zip', placeholder='Choose your exported .zip file') }}
    - {{ wtf.render_field(import_corpus_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
    diff --git a/web/app/templates/corpora/query_results/add_query_result.html.j2 b/web/app/templates/corpora/query_results/add_query_result.html.j2 index 97a83983..6ad4ebd6 100644 --- a/web/app/templates/corpora/query_results/add_query_result.html.j2 +++ b/web/app/templates/corpora/query_results/add_query_result.html.j2 @@ -27,21 +27,21 @@
    - {{ add_query_result_form.hidden_tag() }} + {{ form.hidden_tag() }}
    - {{ wtf.render_field(add_query_result_form.title, data_length='32', material_icon='title') }} + {{ wtf.render_field(form.title, data_length='32', material_icon='title') }}
    - {{ wtf.render_field(add_query_result_form.description, data_length='255', material_icon='description') }} + {{ wtf.render_field(form.description, data_length='255', material_icon='description') }}
    - {{ wtf.render_field(add_query_result_form.file, accept='.json', placeholder='Choose your .json file') }} + {{ wtf.render_field(form.file, accept='.json', placeholder='Choose your .json file') }}
    - {{ wtf.render_field(add_query_result_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
    diff --git a/web/app/templates/main/index.html.j2 b/web/app/templates/main/index.html.j2 index 8268de6a..0ded9824 100644 --- a/web/app/templates/main/index.html.j2 +++ b/web/app/templates/main/index.html.j2 @@ -159,20 +159,20 @@
    Log in - {{ login_form.hidden_tag() }} - {{ wtf.render_field(login_form.user, material_icon='person') }} - {{ wtf.render_field(login_form.password, material_icon='vpn_key') }} + {{ form.hidden_tag() }} + {{ wtf.render_field(form.user, material_icon='person') }} + {{ wtf.render_field(form.password, material_icon='vpn_key') }}
    - {{ wtf.render_field(login_form.remember_me) }} + {{ wtf.render_field(form.remember_me) }}
    - {{ wtf.render_field(login_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
    diff --git a/web/app/templates/nopaque.html.j2 b/web/app/templates/nopaque.html.j2 index 81eadb6f..a54fda33 100644 --- a/web/app/templates/nopaque.html.j2 +++ b/web/app/templates/nopaque.html.j2 @@ -231,9 +231,9 @@
    info_outlineAbout and faq - {% if config.CONTACT_EMAIL_ADRESS %} - rate_reviewContact - feedbackFeedback + {% if config.NOPAQUE_CONTACT %} + rate_reviewContact + feedbackFeedback {% endif %} codeGitLab
    diff --git a/web/app/templates/services/file-setup.html.j2 b/web/app/templates/services/file-setup.html.j2 index 31f5e824..2674545c 100644 --- a/web/app/templates/services/file-setup.html.j2 +++ b/web/app/templates/services/file-setup.html.j2 @@ -48,24 +48,24 @@
    - {{ add_job_form.hidden_tag() }} + {{ form.hidden_tag() }}
    - {{ wtf.render_field(add_job_form.title, data_length='32', material_icon='title') }} + {{ wtf.render_field(form.title, data_length='32', material_icon='title') }}
    - {{ wtf.render_field(add_job_form.description, data_length='255', material_icon='description') }} + {{ wtf.render_field(form.description, data_length='255', material_icon='description') }}
    - {{ wtf.render_field(add_job_form.files, accept='image/jpeg, image/png, image/tiff', placeholder='Choose your .jpeg, .png or .tiff files') }} + {{ wtf.render_field(form.files, accept='image/jpeg, image/png, image/tiff', placeholder='Choose your .jpeg, .png or .tiff files') }}
    - {{ wtf.render_field(add_job_form.version, material_icon='apps') }} + {{ wtf.render_field(form.version, material_icon='apps') }}
    - {{ wtf.render_field(add_job_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
    diff --git a/web/app/templates/services/nlp.html.j2 b/web/app/templates/services/nlp.html.j2 index 83d83396..4c5018bc 100644 --- a/web/app/templates/services/nlp.html.j2 +++ b/web/app/templates/services/nlp.html.j2 @@ -66,34 +66,34 @@
    - {{ add_job_form.hidden_tag() }} + {{ form.hidden_tag() }}
    - {{ wtf.render_field(add_job_form.title, data_length='32', material_icon='title') }} + {{ wtf.render_field(form.title, data_length='32', material_icon='title') }}
    - {{ wtf.render_field(add_job_form.description, data_length='255', material_icon='description') }} + {{ wtf.render_field(form.description, data_length='255', material_icon='description') }}
    - {{ wtf.render_field(add_job_form.files, accept='text/plain', placeholder='Choose your .txt files') }} + {{ wtf.render_field(form.files, accept='text/plain', placeholder='Choose your .txt files') }}
    - {{ wtf.render_field(add_job_form.language, material_icon='language') }} + {{ wtf.render_field(form.language, material_icon='language') }}
    - {{ wtf.render_field(add_job_form.version, material_icon='apps') }} + {{ wtf.render_field(form.version, material_icon='apps') }}
 Preprocessing
-{{ add_job_form.check_encoding.label.text }}
+{{ form.check_encoding.label.text }}
 If the input files are not created with the nopaque OCR service or you do not know if your text files are UTF-8 encoded, check this switch. We will try to automatically determine the right encoding for your texts to process them.
@@ -107,7 +107,7 @@
    - {{ wtf.render_field(add_job_form.submit, material_icon='send') }} + {{ wtf.render_field(form.submit, material_icon='send') }}
    diff --git a/web/app/templates/services/ocr.html.j2 b/web/app/templates/services/ocr.html.j2 index 00608e0d..09759e0c 100644 --- a/web/app/templates/services/ocr.html.j2 +++ b/web/app/templates/services/ocr.html.j2 @@ -48,34 +48,34 @@
    - {{ add_job_form.hidden_tag() }} + {{ form.hidden_tag() }}
    - {{ wtf.render_field(add_job_form.title, data_length='32', material_icon='title') }} + {{ wtf.render_field(form.title, data_length='32', material_icon='title') }}
    - {{ wtf.render_field(add_job_form.description, data_length='255', material_icon='description') }} + {{ wtf.render_field(form.description, data_length='255', material_icon='description') }}
    - {{ wtf.render_field(add_job_form.files, accept='application/pdf', color=ocr_color_darken, placeholder='Choose your .pdf files') }} + {{ wtf.render_field(form.files, accept='application/pdf', color=ocr_color_darken, placeholder='Choose your .pdf files') }}
    - {{ wtf.render_field(add_job_form.language, material_icon='language') }} + {{ wtf.render_field(form.language, material_icon='language') }}
    - {{ wtf.render_field(add_job_form.version, material_icon='apps') }} + {{ wtf.render_field(form.version, material_icon='apps') }}
 Preprocessing
-{{ add_job_form.binarization.label.text }}
+{{ form.binarization.label.text }}
 Based on a brightness threshold pixels are converted into either black or white. It is useful to reduce noise in images. (longer duration)
@@ -134,7 +134,7 @@
    - {{ wtf.render_field(add_job_form.submit, color=ocr_color_darken, material_icon='send') }} + {{ wtf.render_field(form.submit, color=ocr_color_darken, material_icon='send') }}
diff --git a/web/app/templates/tasks/email/notification.html.j2 b/web/app/templates/tasks/email/notification.html.j2
index 79f0e2dd..1aac0bf7 100644
--- a/web/app/templates/tasks/email/notification.html.j2
+++ b/web/app/templates/tasks/email/notification.html.j2
@@ -1,9 +1,8 @@
-Dear {{ user.username }},
+Dear {{ job.creator.username }},
-The status of your Job/Corpus({{ job.id }}) with the title "{{ job.title }}" has changed!
+The status of your Job "{{ job.title }}" has changed!
 It is now {{ job.status }}!
-Time of this status update was: {time} UTC
-You can access your Job/Corpus here: {{ url_for('jobs.job', job_id=job.id) }}
+You can access your Job here: {{ url_for('jobs.job', job_id=job.id) }}
 Kind regards!
 Your nopaque team
    diff --git a/web/app/templates/tasks/email/notification.txt.j2 b/web/app/templates/tasks/email/notification.txt.j2 index 25d797c8..03012b3e 100644 --- a/web/app/templates/tasks/email/notification.txt.j2 +++ b/web/app/templates/tasks/email/notification.txt.j2 @@ -1,10 +1,9 @@ -Dear {{ user.username }}, +Dear {{ job.creator.username }}, -The status of your Job/Corpus({{ job.id }}) with the title "{{ job.title }}" has changed! +The status of your Job "{{ job.title }}" has changed! It is now {{ job.status }}! -Time of this status update was: {time} UTC -You can access your Job/Corpus here: {{ url_for('jobs.job', job_id=job.id) }} +You can access your Job here: {{ url_for('jobs.job', job_id=job.id) }} Kind regards! Your nopaque team diff --git a/web/boot.sh b/web/boot.sh index f39bb4c8..9c87cfd1 100755 --- a/web/boot.sh +++ b/web/boot.sh @@ -1,5 +1,6 @@ #!/bin/bash source venv/bin/activate + while true; do flask deploy if [[ "$?" == "0" ]]; then diff --git a/web/config.py b/web/config.py index 4ca3704f..97e07697 100644 --- a/web/config.py +++ b/web/config.py @@ -7,103 +7,96 @@ ROOT_DIR = os.path.abspath(os.path.dirname(__file__)) class Config: - ''' # Cookies # ''' - REMEMBER_COOKIE_HTTPONLY = True - REMEMBER_COOKIE_SECURE = os.environ.get( - 'NOPAQUE_REMEMBER_COOKIE_SECURE', 'false').lower() == 'true' - SESSION_COOKIE_SECURE = os.environ.get( - 'NOPAQUE_SESSION_COOKIE_SECURE', 'false').lower() == 'true' + ''' # Flask # ''' + SECRET_KEY = os.environ.get('SECRET_KEY', 'hard to guess string') + SESSION_COOKIE_SECURE = \ + os.environ.get('SESSION_COOKIE_SECURE', 'false').lower() == 'true' - ''' # Database # ''' + ''' # Flask-Login # ''' + REMEMBER_COOKIE_HTTPONLY = True + REMEMBER_COOKIE_SECURE = \ + os.environ.get('REMEMBER_COOKIE_SECURE', 'false').lower() == 'true' + + ''' # Flask-Mail # ''' + MAIL_DEFAULT_SENDER = os.environ.get('MAIL_DEFAULT_SENDER') + MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD') + MAIL_PORT = int(os.environ.get('MAIL_PORT')) + MAIL_SERVER = os.environ.get('MAIL_SERVER') + MAIL_USERNAME = os.environ.get('MAIL_USERNAME') + MAIL_USE_SSL = os.environ.get('MAIL_USE_SSL', 'false').lower() == 'true' + MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS', 'false').lower() == 'true' + + ''' # Flask-SQLAlchemy # ''' SQLALCHEMY_RECORD_QUERIES = True SQLALCHEMY_TRACK_MODIFICATIONS = False - ''' # Email # ''' - MAIL_DEFAULT_SENDER = os.environ.get('NOPAQUE_SMTP_DEFAULT_SENDER') - MAIL_PASSWORD = os.environ.get('NOPAQUE_SMTP_PASSWORD') - MAIL_PORT = int(os.environ.get('NOPAQUE_SMTP_PORT')) - MAIL_SERVER = os.environ.get('NOPAQUE_SMTP_SERVER') - MAIL_USERNAME = os.environ.get('NOPAQUE_SMTP_USERNAME') - MAIL_USE_SSL = os.environ.get( - 'NOPAQUE_SMTP_USE_SSL', 'false').lower() == 'true' - MAIL_USE_TLS = os.environ.get( - 'NOPAQUE_SMTP_USE_TLS', 'false').lower() == 'true' - - ''' # General # ''' - ADMIN_EMAIL_ADRESS = os.environ.get('NOPAQUE_ADMIN_EMAIL_ADRESS') - ALLOWED_USERNAME_REGEX = '^[A-Za-zÄÖÜäöüß0-9_.]*$' - CONTACT_EMAIL_ADRESS = os.environ.get('NOPAQUE_CONTACT_EMAIL_ADRESS') - DATA_DIR = os.environ.get('NOPAQUE_DATA_DIR', '/mnt/nopaque') - SECRET_KEY = os.environ.get('NOPAQUE_SECRET_KEY', 'hard to guess string') - - ''' # Logging # ''' - LOG_DATE_FORMAT = os.environ.get('NOPAQUE_LOG_DATE_FORMAT', - '%Y-%m-%d %H:%M:%S') - LOG_FILE = os.environ.get('NOPAQUE_LOG_FILE', - os.path.join(ROOT_DIR, 'nopaque.log')) - LOG_FORMAT = os.environ.get( - 'NOPAQUE_LOG_FORMAT', - '[%(asctime)s] %(levelname)s in ' - '%(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s' - ) - LOG_LEVEL = 
os.environ.get('NOPAQUE_LOG_LEVEL', 'WARNING') - - ''' # Message queue # ''' - SOCKETIO_MESSAGE_QUEUE_URI = os.environ.get( - 'NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI') - - ''' # Proxy fix # ''' - PROXY_FIX_X_FOR = int(os.environ.get('NOPAQUE_PROXY_FIX_X_FOR', '0')) - PROXY_FIX_X_HOST = int(os.environ.get('NOPAQUE_PROXY_FIX_X_HOST', '0')) - PROXY_FIX_X_PORT = int(os.environ.get('NOPAQUE_PROXY_FIX_X_PORT', '0')) - PROXY_FIX_X_PREFIX = int(os.environ.get('NOPAQUE_PROXY_FIX_X_PREFIX', '0')) - PROXY_FIX_X_PROTO = int(os.environ.get('NOPAQUE_PROXY_FIX_X_PROTO', '0')) + ''' # nopaque # ''' + NOPAQUE_ADMIN = os.environ.get('NOPAQUE_ADMIN') + NOPAQUE_CONTACT = os.environ.get('NOPAQUE_CONTACT') + NOPAQUE_DATA_DIR = os.environ.get('NOPAQUE_DATA_DIR', '/mnt/nopaque') + NOPAQUE_MAIL_SUBJECT_PREFIX = '[nopaque]' + NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI = \ + os.environ.get('NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI') + NOPAQUE_USERNAME_REGEX = '^[A-Za-zÄÖÜäöüß0-9_.]*$' @classmethod def init_app(cls, app): - # Set up logging according to the corresponding (LOG_*) variables - logging.basicConfig(datefmt=cls.LOG_DATE_FORMAT, - filename=cls.LOG_FILE, - format=cls.LOG_FORMAT, - level=cls.LOG_LEVEL) + # Set up logging according to the corresponding (NOPAQUE_LOG_*) + # environment variables + basic_config_kwargs = { + 'datefmt': os.environ.get('NOPAQUE_LOG_DATE_FORMAT', + '%Y-%m-%d %H:%M:%S'), + 'filename': os.environ.get('NOPAQUE_LOG_FILE', + os.path.join(ROOT_DIR, 'nopaque.log')), + 'format': os.environ.get( + 'NOPAQUE_LOG_FORMAT', + '[%(asctime)s] %(levelname)s in ' + '%(pathname)s (function: %(funcName)s, line: %(lineno)d): ' + '%(message)s' + ), + 'level': os.environ.get('NOPAQUE_LOG_LEVEL', 'WARNING') + } + logging.basicConfig(**basic_config_kwargs) # Set up and apply the ProxyFix middleware according to the - # corresponding (PROXY_FIX_*) variables - app.wsgi_app = ProxyFix(app.wsgi_app, - x_for=cls.PROXY_FIX_X_FOR, - x_host=cls.PROXY_FIX_X_HOST, - x_port=cls.PROXY_FIX_X_PORT, - x_prefix=cls.PROXY_FIX_X_PREFIX, - x_proto=cls.PROXY_FIX_X_PROTO) + # corresponding (NOPAQUE_PROXY_FIX_*) environment variables + proxy_fix_kwargs = { + 'x_for': int(os.environ.get('NOPAQUE_PROXY_FIX_X_FOR', '0')), + 'x_host': int(os.environ.get('NOPAQUE_PROXY_FIX_X_HOST', '0')), + 'x_port': int(os.environ.get('NOPAQUE_PROXY_FIX_X_PORT', '0')), + 'x_prefix': int(os.environ.get('NOPAQUE_PROXY_FIX_X_PREFIX', '0')), + 'x_proto': int(os.environ.get('NOPAQUE_PROXY_FIX_X_PROTO', '0')) + } + app.wsgi_app = ProxyFix(app.wsgi_app, **proxy_fix_kwargs) class DevelopmentConfig(Config): - ''' # Database # ''' + ''' # Flask # ''' + DEBUG = True + + ''' # Flask-SQLAlchemy # ''' SQLALCHEMY_DATABASE_URI = os.environ.get( - 'NOPAQUE_DEV_DATABASE_URL', + 'SQLALCHEMY_DATABASE_URI', 'postgresql://nopaque:nopaque@db/nopaque_dev' ) - ''' # General # ''' - DEBUG = True - class ProductionConfig(Config): - ''' # Database # ''' + ''' # Flask-SQLAlchemy # ''' SQLALCHEMY_DATABASE_URI = os.environ.get( - 'NOPAQUE_DATABASE_URL', 'postgresql://nopaque:nopaque@db/nopaque') + 'SQLALCHEMY_DATABASE_URI', 'postgresql://nopaque:nopaque@db/nopaque') class TestingConfig(Config): - ''' # Database # ''' - SQLALCHEMY_DATABASE_URI = os.environ.get( - 'NOPAQUE_TEST_DATABASE_URL', - 'postgresql://nopaque:nopaque@db/nopaque_test' - ) - - ''' # General # ''' + ''' # Flask # ''' TESTING = True WTF_CSRF_ENABLED = False + ''' # Flask-SQLAlchemy # ''' + SQLALCHEMY_DATABASE_URI = os.environ.get( + 'SQLALCHEMY_DATABASE_URI', + 'postgresql://nopaque:nopaque@db/nopaque_test' + ) + config = 
{'development': DevelopmentConfig, 'production': ProductionConfig, diff --git a/web/nopaque.py b/web/nopaque.py index 43d69c38..5c5c5af5 100644 --- a/web/nopaque.py +++ b/web/nopaque.py @@ -17,8 +17,7 @@ if os.path.exists(DOTENV_FILE): from app import create_app, db, socketio # noqa from app.models import (Corpus, CorpusFile, Job, JobInput, JobResult, - NotificationData, NotificationEmailData, QueryResult, - Role, User) # noqa + QueryResult, Role, User) # noqa from flask_migrate import Migrate, upgrade # noqa @@ -34,8 +33,6 @@ def make_shell_context(): 'Job': Job, 'JobInput': JobInput, 'JobResult': JobResult, - 'NotificationData': NotificationData, - 'NotificationEmailData': NotificationEmailData, 'QueryResult': QueryResult, 'Role': Role, 'User': User} @@ -53,9 +50,9 @@ def deploy(): @app.cli.command() def tasks(): - from app.tasks import process_corpora, process_jobs - process_corpora() - process_jobs() + from app.tasks import check_corpora, check_jobs + check_corpora() + check_jobs() @app.cli.command() From f3f6612a575e9ef2107c942653ba93c00b927397 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Fri, 13 Nov 2020 13:33:32 +0100 Subject: [PATCH 08/52] daemon is now tasks --- web/app/models.py | 11 +-- web/app/services/views.py | 4 +- web/app/tasks/__init__.py | 7 ++ web/app/tasks/job_utils.py | 84 ++++++++++++------- .../tasks/email/notification.html.j2 | 2 +- .../templates/tasks/email/notification.txt.j2 | 2 +- 6 files changed, 67 insertions(+), 43 deletions(-) diff --git a/web/app/models.py b/web/app/models.py index 6af02a2e..fc12610d 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -4,7 +4,6 @@ from flask_login import UserMixin, AnonymousUserMixin from itsdangerous import BadSignature, TimedJSONWebSignatureSerializer from time import sleep from werkzeug.security import generate_password_hash, check_password_hash -from werkzeug.utils import secure_filename import xml.etree.ElementTree as ET from . import db, login_manager import logging @@ -180,7 +179,7 @@ class User(UserMixin, db.Model): def __init__(self, **kwargs): super(User, self).__init__(**kwargs) if self.role is None: - if self.email == current_app.config['ADMIN_EMAIL_ADRESS']: + if self.email == current_app.config['NOPAQUE_ADMIN']: self.role = Role.query.filter_by(name='Administrator').first() if self.role is None: self.role = Role.query.filter_by(default=True).first() @@ -340,8 +339,6 @@ class Job(db.Model): end_date = db.Column(db.DateTime()) mem_mb = db.Column(db.Integer) n_cores = db.Column(db.Integer) - # This is used for zip creation - secure_filename = db.Column(db.String(32)) service = db.Column(db.String(64)) ''' ' Service specific arguments as string list. @@ -367,12 +364,6 @@ class Job(db.Model): ''' return ''.format(self.title) - def create_secure_filename(self): - ''' - Takes the job.title string nad cratesa a secure filename from this. - ''' - self.secure_filename = secure_filename(self.title) - def delete(self): ''' Delete the job and its inputs and results from the database. 
diff --git a/web/app/services/views.py b/web/app/services/views.py index a6567985..57aaef91 100644 --- a/web/app/services/views.py +++ b/web/app/services/views.py @@ -50,8 +50,6 @@ def service(service): service=service, service_args=json.dumps(service_args), service_version=form.version.data, status='preparing', title=form.title.data) - if job.service != 'corpus_analysis': - job.create_secure_filename() db.session.add(job) db.session.commit() try: @@ -66,7 +64,7 @@ def service(service): else: for file in form.files.data: filename = secure_filename(file.filename) - job_input = JobInput(dir=job.path, filename=filename, job=job) + job_input = JobInput(filename=filename, job=job) file.save(job_input.path) db.session.add(job_input) job.status = 'submitted' diff --git a/web/app/tasks/__init__.py b/web/app/tasks/__init__.py index 9bd21af6..b496975c 100644 --- a/web/app/tasks/__init__.py +++ b/web/app/tasks/__init__.py @@ -21,9 +21,16 @@ def check_corpora(): def check_jobs(): + print('check_jobs()') jobs = Job.query.all() + print([job.status for job in jobs]) for job in filter(lambda job: job.status == 'submitted', jobs): + print('pre create_job_service({})'.format(job)) job_utils.create_job_service(job) for job in filter(lambda job: job.status in ['queued', 'running'], jobs): + print('pre checkout_job_service({})'.format(job)) job_utils.checkout_job_service(job) + for job in filter(lambda job: job.status == 'canceling', jobs): + print('pre remove_job_service({})'.format(job)) + job_utils.remove_job_service(job) db.session.commit() diff --git a/web/app/tasks/job_utils.py b/web/app/tasks/job_utils.py index 68db9507..d5958137 100644 --- a/web/app/tasks/job_utils.py +++ b/web/app/tasks/job_utils.py @@ -1,7 +1,8 @@ from datetime import datetime +from werkzeug.utils import secure_filename from . import docker_client -from .. import db -from ..email import create_message, send +from .. import db, mail +from ..email import create_message from ..models import JobResult import docker import logging @@ -10,11 +11,12 @@ import os def create_job_service(job): + print('create_job_service({})'.format(job)) cmd = '{} -i /files -o /files/output'.format(job.service) if job.service == 'file-setup': - cmd += ' -f {}'.format(job.secure_filename) + cmd += ' -f {}'.format(secure_filename(job.title)) cmd += ' --log-dir /files' - cmd += ' --zip [{}]_{}'.format(job.service, job.secure_filename) + cmd += ' --zip [{}]_{}'.format(job.service, secure_filename(job.title)) cmd += ' ' + ' '.join(json.loads(job.service_args)) service_kwargs = {'command': cmd, 'constraints': ['node.role==worker'], @@ -36,15 +38,18 @@ def create_job_service(job): logging.error('Create "{}" service raised '.format(service_kwargs['name']) # noqa + '[docker-APIError] The server returned an error. ' + 'Details: {}'.format(e)) + return else: job.status = 'queued' + finally: + print('Sending email to {}'.format(job.creator.email)) msg = create_message( job.creator.email, 'Status update for your Job "{}"'.format(job.title), 'tasks/email/notification', job=job ) - send(msg) + mail.send(msg) def checkout_job_service(job): @@ -60,10 +65,12 @@ def checkout_job_service(job): logging.error('Get "{}" service raised '.format(service_name) + '[docker-APIError] The server returned an error. 
' + 'Details: {}'.format(e)) + return except docker.errors.InvalidVersion: logging.error('Get "{}" service raised '.format(service_name) + '[docker-InvalidVersion] One of the arguments is ' + 'not supported with the current API version.') + return else: service_tasks = service.tasks() if not service_tasks: @@ -71,31 +78,33 @@ def checkout_job_service(job): task_state = service_tasks[0].get('Status').get('State') if job.status == 'queued' and task_state != 'pending': job.status = 'running' - elif job.status == 'running' and task_state == 'complete': - service.remove() - job.end_date = datetime.utcnow() - job.status = task_state - if task_state == 'complete': - job_results_dir = os.path.join(job.path, 'output') - job_results = filter(lambda x: x.endswith('.zip'), - os.listdir(job_results_dir)) - for job_result in job_results: - job_result = JobResult(filename=job_result, job=job) - db.session.add(job_result) - elif job.status == 'running' and task_state == 'failed': - service.remove() - job.end_date = datetime.utcnow() - job.status = task_state + elif job.status == 'running' and task_state in ['complete', 'failed']: # noqa + try: + service.remove() + except docker.errors.APIError as e: + logging.error('Remove "{}" service raised '.format(service_name) # noqa + + '[docker-APIError] The server returned an error. ' # noqa + + 'Details: {}'.format(e)) + return + else: + job.end_date = datetime.utcnow() + job.status = task_state + if task_state == 'complete': + job_results_dir = os.path.join(job.path, 'output') + job_results = filter(lambda x: x.endswith('.zip'), + os.listdir(job_results_dir)) + for job_result in job_results: + job_result = JobResult(filename=job_result, job=job) + db.session.add(job_result) finally: - # TODO: send email + print('Sending email to {}'.format(job.creator.email)) msg = create_message( job.creator.email, - '[nopaque] Status update for your Job "{}"'.format(job.title), + 'Status update for your Job "{}"'.format(job.title), 'tasks/email/notification', job=job ) - send(msg) - pass + mail.send(msg) def remove_job_service(job): @@ -103,9 +112,28 @@ def remove_job_service(job): try: service = docker_client.services.get(service_name) except docker.errors.NotFound: - # TODO: send email job.status = 'canceled' - # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion + except docker.errors.APIError as e: + logging.error('Get "{}" service raised '.format(service_name) + + '[docker-APIError] The server returned an error. ' + + 'Details: {}'.format(e)) + return + except docker.errors.InvalidVersion: + logging.error('Get "{}" service raised '.format(service_name) + + '[docker-InvalidVersion] One of the arguments is ' + + 'not supported with the current API version.') + return else: - service.update(mounts=None) - service.remove() + try: + service.update(mounts=None) + except docker.errors.APIError as e: + logging.error('Update "{}" service raised '.format(service_name) # noqa + + '[docker-APIError] The server returned an error. ' # noqa + + 'Details: {}'.format(e)) + return + try: + service.remove() + except docker.errors.APIError as e: + logging.error('Remove "{}" service raised '.format(service_name) # noqa + + '[docker-APIError] The server returned an error. 
' # noqa + + 'Details: {}'.format(e)) diff --git a/web/app/templates/tasks/email/notification.html.j2 b/web/app/templates/tasks/email/notification.html.j2 index 1aac0bf7..4e25ab4e 100644 --- a/web/app/templates/tasks/email/notification.html.j2 +++ b/web/app/templates/tasks/email/notification.html.j2 @@ -3,6 +3,6 @@

 The status of your Job "{{ job.title }}" has changed!
 It is now {{ job.status }}!
-You can access your Job here: {{ url_for('jobs.job', job_id=job.id) }}
+You can access your Job here:
 Kind regards!
 Your nopaque team

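A note on the file-setup command above: instead of reading the dropped Job.secure_filename column, PATCH 08 now derives the name on the fly with werkzeug's secure_filename(). A minimal sketch of what that helper does (the job title is made up for illustration):

    from werkzeug.utils import secure_filename

    # Whitespace becomes underscores and characters that are unsafe in file
    # names are stripped, so the result can be used directly for the -f and
    # --zip arguments built in create_job_service().
    secure_filename('My first Job!')  # -> 'My_first_Job'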
    diff --git a/web/app/templates/tasks/email/notification.txt.j2 b/web/app/templates/tasks/email/notification.txt.j2 index 03012b3e..7634ee3a 100644 --- a/web/app/templates/tasks/email/notification.txt.j2 +++ b/web/app/templates/tasks/email/notification.txt.j2 @@ -3,7 +3,7 @@ Dear {{ job.creator.username }}, The status of your Job "{{ job.title }}" has changed! It is now {{ job.status }}! -You can access your Job here: {{ url_for('jobs.job', job_id=job.id) }} +You can access your Job here: Kind regards! Your nopaque team From 5427698a47391d14316a0e3c1f52191ae0ef4985 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Fri, 13 Nov 2020 13:54:07 +0100 Subject: [PATCH 09/52] Remove debog print statements --- web/app/tasks/__init__.py | 5 ----- web/app/tasks/job_utils.py | 3 --- 2 files changed, 8 deletions(-) diff --git a/web/app/tasks/__init__.py b/web/app/tasks/__init__.py index b496975c..154841b7 100644 --- a/web/app/tasks/__init__.py +++ b/web/app/tasks/__init__.py @@ -21,16 +21,11 @@ def check_corpora(): def check_jobs(): - print('check_jobs()') jobs = Job.query.all() - print([job.status for job in jobs]) for job in filter(lambda job: job.status == 'submitted', jobs): - print('pre create_job_service({})'.format(job)) job_utils.create_job_service(job) for job in filter(lambda job: job.status in ['queued', 'running'], jobs): - print('pre checkout_job_service({})'.format(job)) job_utils.checkout_job_service(job) for job in filter(lambda job: job.status == 'canceling', jobs): - print('pre remove_job_service({})'.format(job)) job_utils.remove_job_service(job) db.session.commit() diff --git a/web/app/tasks/job_utils.py b/web/app/tasks/job_utils.py index d5958137..7a4e2109 100644 --- a/web/app/tasks/job_utils.py +++ b/web/app/tasks/job_utils.py @@ -11,7 +11,6 @@ import os def create_job_service(job): - print('create_job_service({})'.format(job)) cmd = '{} -i /files -o /files/output'.format(job.service) if job.service == 'file-setup': cmd += ' -f {}'.format(secure_filename(job.title)) @@ -42,7 +41,6 @@ def create_job_service(job): else: job.status = 'queued' finally: - print('Sending email to {}'.format(job.creator.email)) msg = create_message( job.creator.email, 'Status update for your Job "{}"'.format(job.title), @@ -97,7 +95,6 @@ def checkout_job_service(job): job_result = JobResult(filename=job_result, job=job) db.session.add(job_result) finally: - print('Sending email to {}'.format(job.creator.email)) msg = create_message( job.creator.email, 'Status update for your Job "{}"'.format(job.title), From 19338ba8d5d37b11b901ab577bd6431836541d93 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Fri, 13 Nov 2020 15:01:53 +0100 Subject: [PATCH 10/52] Add daemon loop --- .env.tpl | 4 ++++ web/Dockerfile | 1 - web/app/models.py | 2 +- web/boot.sh | 29 +++++++++++++++++------------ web/nopaque-daemon.sh | 7 +++++++ 5 files changed, 29 insertions(+), 14 deletions(-) create mode 100755 web/nopaque-daemon.sh diff --git a/.env.tpl b/.env.tpl index e7c2cea7..ee8a68af 100644 --- a/.env.tpl +++ b/.env.tpl @@ -111,6 +111,10 @@ NOPAQUE_ADMIN= # Swarm nodes # NOPAQUE_DATA_DIR= +# CHOOSE ONE: False, True +# DEFAULT: False +# NOPAQUE_DAEMON_ENABLED= + # DEFAULT: 0.0.0.0 # NOPAQUE_HOST= diff --git a/web/Dockerfile b/web/Dockerfile index 3681b701..d5de0392 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -17,7 +17,6 @@ RUN apt-get update \ && apt-get install --no-install-recommends --yes \ build-essential \ libpq-dev \ - wait-for-it \ && rm -r /var/lib/apt/lists/* diff --git a/web/app/models.py 
b/web/app/models.py index fc12610d..e912bf17 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -35,7 +35,7 @@ class Role(db.Model): # Fields default = db.Column(db.Boolean, default=False, index=True) name = db.Column(db.String(64), unique=True) - permissions = db.Column(db.BigInteger) + permissions = db.Column(db.Integer) # Relationships users = db.relationship('User', backref='role', lazy='dynamic') diff --git a/web/boot.sh b/web/boot.sh index 9c87cfd1..d6536518 100755 --- a/web/boot.sh +++ b/web/boot.sh @@ -1,21 +1,26 @@ #!/bin/bash + +if [[ "${NOPAQUE_DAEMON_ENABLED}" == "True" ]]; then + echo "Starting nopaque daemon..." + ./nopaque-daemon.sh & +fi + source venv/bin/activate -while true; do - flask deploy - if [[ "$?" == "0" ]]; then - break - fi - echo Deploy command failed, retrying in 5 secs... - sleep 5 -done - -if [[ "$#" -eq 0 ]]; then +if [[ "${#}" -eq 0 ]]; then + while true; do + flask deploy + if [[ "${?}" == "0" ]]; then + break + fi + echo Deploy command failed, retrying in 5 secs... + sleep 5 + done python nopaque.py -elif [[ "$1" == "flask" ]]; then +elif [[ "${1}" == "flask" ]]; then exec ${@:1} else - echo "$0 [COMMAND]" + echo "${0} [COMMAND]" echo "" echo "nopaque startup script" echo "" diff --git a/web/nopaque-daemon.sh b/web/nopaque-daemon.sh new file mode 100755 index 00000000..7a903646 --- /dev/null +++ b/web/nopaque-daemon.sh @@ -0,0 +1,7 @@ +#!/bin/bash +source venv/bin/activate + +while true; do + flask tasks + sleep 10 +done From 7e84b544677b659bcc5fb4b7a4b17a3a40646921 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Tue, 17 Nov 2020 10:16:40 +0100 Subject: [PATCH 11/52] Add config variables in order to create links outside of a request context --- .env.tpl | 9 +++++++++ docker-compose.traefik.yml | 4 ++-- web/app/templates/tasks/email/notification.html.j2 | 2 +- web/app/templates/tasks/email/notification.txt.j2 | 2 +- 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/.env.tpl b/.env.tpl index e7c2cea7..d88f1da5 100644 --- a/.env.tpl +++ b/.env.tpl @@ -31,10 +31,19 @@ HOST_DOCKER_GID= # Flask # # https://flask.palletsprojects.com/en/1.1.x/config/ # ################################################################################ +# CHOOSE ONE: http, https +# DEFAULT: http +# PREFERRED_URL_SCHEME= + # DEFAULT: hard to guess string # HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"` # SECRET_KEY= +# Example: nopaque.example.com nopaque.example.com:5000 +# HINT: If your instance is publicly available on a different Port then 80/443, +# you will have to add this to the server name +SERVER_NAME= + # CHOOSE ONE: False, True # DEFAULT: False # HINT: Set to true if you redirect http to https diff --git a/docker-compose.traefik.yml b/docker-compose.traefik.yml index 5cefda8f..afd688f9 100644 --- a/docker-compose.traefik.yml +++ b/docker-compose.traefik.yml @@ -18,13 +18,13 @@ services: - "traefik.http.middlewares.nopaque-header.headers.customrequestheaders.X-Forwarded-Proto=http" - "traefik.http.routers.nopaque.entrypoints=web" - "traefik.http.routers.nopaque.middlewares=nopaque-header, redirect-to-https@file" - - "traefik.http.routers.nopaque.rule=Host(``)" + - "traefik.http.routers.nopaque.rule=Host(`${SERVER_NAME}`)" ### ### ### ### - "traefik.http.middlewares.nopaque-secure-header.headers.customrequestheaders.X-Forwarded-Proto=https" - "traefik.http.routers.nopaque-secure.entrypoints=web-secure" - "traefik.http.routers.nopaque-secure.middlewares=hsts-header@file, nopaque-secure-header" - - 
"traefik.http.routers.nopaque-secure.rule=Host(``)" + - "traefik.http.routers.nopaque-secure.rule=Host(`${SERVER_NAME}`)" - "traefik.http.routers.nopaque-secure.tls.certresolver=" - "traefik.http.routers.nopaque-secure.tls.options=intermediate@file" ### ### diff --git a/web/app/templates/tasks/email/notification.html.j2 b/web/app/templates/tasks/email/notification.html.j2 index 4e25ab4e..1aac0bf7 100644 --- a/web/app/templates/tasks/email/notification.html.j2 +++ b/web/app/templates/tasks/email/notification.html.j2 @@ -3,6 +3,6 @@

 The status of your Job "{{ job.title }}" has changed!
 It is now {{ job.status }}!
-You can access your Job here:
+You can access your Job here: {{ url_for('jobs.job', job_id=job.id) }}
 Kind regards!
 Your nopaque team

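The url_for() call can return in PATCH 11 because SERVER_NAME and PREFERRED_URL_SCHEME give Flask enough information to build absolute URLs without an active request. A minimal sketch of that pattern, assuming the create_app() factory imported in nopaque.py needs no further arguments:

    from flask import url_for
    from app import create_app

    app = create_app()  # pass the factory's config argument here if it expects one
    with app.app_context():
        # With SERVER_NAME and PREFERRED_URL_SCHEME set, Flask can generate
        # request-independent absolute links, e.g. for the notification
        # mails that the daemon tasks send.
        link = url_for('jobs.job', job_id=42, _external=True)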
    diff --git a/web/app/templates/tasks/email/notification.txt.j2 b/web/app/templates/tasks/email/notification.txt.j2 index 7634ee3a..03012b3e 100644 --- a/web/app/templates/tasks/email/notification.txt.j2 +++ b/web/app/templates/tasks/email/notification.txt.j2 @@ -3,7 +3,7 @@ Dear {{ job.creator.username }}, The status of your Job "{{ job.title }}" has changed! It is now {{ job.status }}! -You can access your Job here: +You can access your Job here: {{ url_for('jobs.job', job_id=job.id) }} Kind regards! Your nopaque team From ddee38e2a5ba619afd6556bdbdc5f6fddfddcd6a Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Tue, 17 Nov 2020 14:58:03 +0100 Subject: [PATCH 12/52] Delete old daemon package and fix typos --- .env.tpl | 5 +- daemon/.dockerignore | 6 - daemon/Dockerfile | 32 ---- daemon/app/__init__.py | 31 ---- daemon/app/decorators.py | 14 -- daemon/app/models.py | 52 ------- daemon/app/tasks/__init__.py | 0 daemon/app/tasks/check_corpora.py | 140 ----------------- daemon/app/tasks/check_jobs.py | 147 ------------------ daemon/app/tasks/libnotify/__init__.py | 0 daemon/app/tasks/libnotify/notification.py | 28 ---- daemon/app/tasks/libnotify/service.py | 16 -- .../libnotify/templates/notification.html | 15 -- .../libnotify/templates/notification.txt | 10 -- daemon/app/tasks/notify.py | 111 ------------- daemon/boot.sh | 3 - daemon/config.py | 71 --------- daemon/nopaqued.py | 13 -- daemon/requirements.txt | 4 - web/boot.sh | 4 +- 20 files changed, 6 insertions(+), 696 deletions(-) delete mode 100644 daemon/.dockerignore delete mode 100644 daemon/Dockerfile delete mode 100644 daemon/app/__init__.py delete mode 100644 daemon/app/decorators.py delete mode 100644 daemon/app/models.py delete mode 100644 daemon/app/tasks/__init__.py delete mode 100644 daemon/app/tasks/check_corpora.py delete mode 100644 daemon/app/tasks/check_jobs.py delete mode 100644 daemon/app/tasks/libnotify/__init__.py delete mode 100644 daemon/app/tasks/libnotify/notification.py delete mode 100644 daemon/app/tasks/libnotify/service.py delete mode 100644 daemon/app/tasks/libnotify/templates/notification.html delete mode 100644 daemon/app/tasks/libnotify/templates/notification.txt delete mode 100644 daemon/app/tasks/notify.py delete mode 100755 daemon/boot.sh delete mode 100644 daemon/config.py delete mode 100644 daemon/nopaqued.py delete mode 100644 daemon/requirements.txt diff --git a/.env.tpl b/.env.tpl index 54165fce..25884399 100644 --- a/.env.tpl +++ b/.env.tpl @@ -39,7 +39,7 @@ HOST_DOCKER_GID= # HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"` # SECRET_KEY= -# Example: nopaque.example.com nopaque.example.com:5000 +# Example: nopaque.example.com/nopaque.example.com:5000 # HINT: If your instance is publicly available on a different Port then 80/443, # you will have to add this to the server name SERVER_NAME= @@ -124,9 +124,12 @@ NOPAQUE_ADMIN= # DEFAULT: False # NOPAQUE_DAEMON_ENABLED= +# The hostname or IP address for the server to listen on. +# HINT: To use a domain locally, add any names that should route to the app to your hosts file. # DEFAULT: 0.0.0.0 # NOPAQUE_HOST= +# The port number for the server to listen on. 
# DEFAULT: 5000 # NOPAQUE_PORT= diff --git a/daemon/.dockerignore b/daemon/.dockerignore deleted file mode 100644 index 21803000..00000000 --- a/daemon/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -# Docker related files -Dockerfile -.dockerignore - -# Packages -__pycache__ diff --git a/daemon/Dockerfile b/daemon/Dockerfile deleted file mode 100644 index be9a5d74..00000000 --- a/daemon/Dockerfile +++ /dev/null @@ -1,32 +0,0 @@ -FROM python:3.9.0-slim-buster - - -LABEL authors="Patrick Jentsch , Stephan Porada " - - -ARG DOCKER_GID -ARG GID -ARG UID -ENV LANG=C.UTF-8 - - -RUN apt-get update \ - && apt-get install --no-install-recommends --yes \ - build-essential \ - libpq-dev \ - && rm -r /var/lib/apt/lists/* - - -RUN groupadd --gid ${DOCKER_GID} --system docker \ - && groupadd --gid ${GID} --system nopaqued \ - && useradd --create-home --gid ${GID} --groups ${DOCKER_GID} --no-log-init --system --uid ${UID} nopaqued -USER nopaqued -WORKDIR /home/nopaqued - - -COPY --chown=nopaqued:nopaqued [".", "."] -RUN python -m venv venv \ - && venv/bin/pip install --requirement requirements.txt - - -ENTRYPOINT ["./boot.sh"] diff --git a/daemon/app/__init__.py b/daemon/app/__init__.py deleted file mode 100644 index eaccafd2..00000000 --- a/daemon/app/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -from config import config -from sqlalchemy import create_engine -from sqlalchemy.orm import scoped_session, sessionmaker -from time import sleep -import docker -import os - - -configuration = config[os.environ.get('NOPAQUE_CONFIG', 'development')] -configuration.init() -docker_client = docker.from_env() -engine = create_engine(configuration.SQLALCHEMY_DATABASE_URI) -Session = scoped_session(sessionmaker(bind=engine)) - - -def run(): - from .tasks.check_corpora import check_corpora - check_corpora_thread = check_corpora() - from .tasks.check_jobs import check_jobs - check_jobs_thread = check_jobs() - from .tasks.notify import notify - notify_thread = notify() - - while True: - if not check_corpora_thread.is_alive(): - check_corpora_thread = check_corpora() - if not check_jobs_thread.is_alive(): - check_jobs_thread = check_jobs() - if not notify_thread.is_alive(): - notify_thread = notify() - sleep(3) diff --git a/daemon/app/decorators.py b/daemon/app/decorators.py deleted file mode 100644 index 040250a8..00000000 --- a/daemon/app/decorators.py +++ /dev/null @@ -1,14 +0,0 @@ -from functools import wraps -from threading import Thread - - -def background(f): - ''' - ' This decorator executes a function in a Thread. - ''' - @wraps(f) - def wrapped(*args, **kwargs): - thread = Thread(target=f, args=args, kwargs=kwargs) - thread.start() - return thread - return wrapped diff --git a/daemon/app/models.py b/daemon/app/models.py deleted file mode 100644 index 1f113142..00000000 --- a/daemon/app/models.py +++ /dev/null @@ -1,52 +0,0 @@ -from sqlalchemy.ext.automap import automap_base -from sqlalchemy.orm import relationship -from . 
import engine - - -Base = automap_base() - - -# Classes for database models -class Corpus(Base): - __tablename__ = 'corpora' - files = relationship('CorpusFile', collection_class=set) - - -class CorpusFile(Base): - __tablename__ = 'corpus_files' - - -class Job(Base): - __tablename__ = 'jobs' - inputs = relationship('JobInput', collection_class=set) - results = relationship('JobResult', collection_class=set) - notification_data = relationship('NotificationData', collection_class=list) - notification_email_data = relationship('NotificationEmailData', - collection_class=list) - - -class JobInput(Base): - __tablename__ = 'job_results' - - -class JobResult(Base): - __tablename__ = 'job_results' - - -class NotificationData(Base): - __tablename__ = 'notification_data' - job = relationship('Job', collection_class=set) - - -class NotificationEmailData(Base): - __tablename__ = 'notification_email_data' - job = relationship('Job', collection_class=set) - - -class User(Base): - __tablename__ = 'users' - jobs = relationship('Job', collection_class=set) - corpora = relationship('Corpus', collection_class=set) - - -Base.prepare(engine, reflect=True) diff --git a/daemon/app/tasks/__init__.py b/daemon/app/tasks/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/daemon/app/tasks/check_corpora.py b/daemon/app/tasks/check_corpora.py deleted file mode 100644 index 6ecffea5..00000000 --- a/daemon/app/tasks/check_corpora.py +++ /dev/null @@ -1,140 +0,0 @@ -from .. import configuration as config -from .. import docker_client, Session -from ..decorators import background -from ..models import Corpus -import docker -import logging -import os -import shutil - - -@background -def check_corpora(): - session = Session() - corpora = session.query(Corpus).all() - for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): - __create_build_corpus_service(corpus) - for corpus in filter(lambda corpus: (corpus.status == 'queued' - or corpus.status == 'running'), - corpora): - __checkout_build_corpus_service(corpus) - for corpus in filter(lambda corpus: corpus.status == 'start analysis', - corpora): - __create_cqpserver_container(corpus) - for corpus in filter(lambda corpus: corpus.status == 'stop analysis', - corpora): - __remove_cqpserver_container(corpus) - session.commit() - Session.remove() - - -def __create_build_corpus_service(corpus): - corpus_dir = os.path.join(config.DATA_DIR, - str(corpus.user_id), - 'corpora', - str(corpus.id)) - corpus_data_dir = os.path.join(corpus_dir, 'data') - corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt') - corpus_registry_dir = os.path.join(corpus_dir, 'registry') - if os.path.exists(corpus_data_dir): - shutil.rmtree(corpus_data_dir) - if os.path.exists(corpus_registry_dir): - shutil.rmtree(corpus_registry_dir) - os.mkdir(corpus_data_dir) - os.mkdir(corpus_registry_dir) - service_args = {'command': 'docker-entrypoint.sh build-corpus', - 'constraints': ['node.role==worker'], - 'labels': {'origin': 'nopaque', - 'type': 'corpus.prepare', - 'corpus_id': str(corpus.id)}, - 'mounts': [corpus_file + ':/root/files/corpus.vrt:ro', - corpus_data_dir + ':/corpora/data:rw', - corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], - 'name': 'build-corpus_{}'.format(corpus.id), - 'restart_policy': docker.types.RestartPolicy()} - service_image = \ - 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' - try: - service = docker_client.services.get(service_args['name']) - except docker.errors.NotFound: - pass - except 
docker.errors.DockerException: - return - else: - service.remove() - try: - docker_client.services.create(service_image, **service_args) - except docker.errors.DockerException: - corpus.status = 'failed' - else: - corpus.status = 'queued' - - -def __checkout_build_corpus_service(corpus): - service_name = 'build-corpus_{}'.format(corpus.id) - try: - service = docker_client.services.get(service_name) - except docker.errors.NotFound: - logging.error('__checkout_build_corpus_service({}):'.format(corpus.id) - + ' The service does not exist.' - + ' (stauts: {} -> failed)'.format(corpus.status)) - corpus.status = 'failed' - return - except docker.errors.DockerException: - return - service_tasks = service.tasks() - if not service_tasks: - return - task_state = service_tasks[0].get('Status').get('State') - if corpus.status == 'queued' and task_state != 'pending': - corpus.status = 'running' - elif corpus.status == 'running' and task_state == 'complete': - service.remove() - corpus.status = 'prepared' - elif corpus.status == 'running' and task_state == 'failed': - service.remove() - corpus.status = task_state - - -def __create_cqpserver_container(corpus): - corpus_dir = os.path.join(config.DATA_DIR, - str(corpus.user_id), - 'corpora', - str(corpus.id)) - corpus_data_dir = os.path.join(corpus_dir, 'data') - corpus_registry_dir = os.path.join(corpus_dir, 'registry') - container_args = {'command': 'cqpserver', - 'detach': True, - 'volumes': [corpus_data_dir + ':/corpora/data:rw', - corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], - 'name': 'cqpserver_{}'.format(corpus.id), - 'network': 'nopaque_default'} - container_image = \ - 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' - try: - container = docker_client.containers.get(container_args['name']) - except docker.errors.NotFound: - pass - except docker.errors.DockerException: - return - else: - container.remove(force=True) - try: - docker_client.containers.run(container_image, **container_args) - except docker.errors.DockerException: - return - else: - corpus.status = 'analysing' - - -def __remove_cqpserver_container(corpus): - container_name = 'cqpserver_{}'.format(corpus.id) - try: - container = docker_client.containers.get(container_name) - except docker.errors.NotFound: - pass - except docker.errors.DockerException: - return - else: - container.remove(force=True) - corpus.status = 'prepared' diff --git a/daemon/app/tasks/check_jobs.py b/daemon/app/tasks/check_jobs.py deleted file mode 100644 index f5530e1e..00000000 --- a/daemon/app/tasks/check_jobs.py +++ /dev/null @@ -1,147 +0,0 @@ -from datetime import datetime -from .. import configuration as config -from .. 
import docker_client, Session -from ..decorators import background -from ..models import Job, JobResult, NotificationData, NotificationEmailData -import docker -import logging -import json -import os - - -@background -def check_jobs(): - session = Session() - jobs = session.query(Job).all() - for job in filter(lambda job: job.status == 'submitted', jobs): - __create_job_service(job) - for job in filter(lambda job: job.status == 'queued', jobs): - __checkout_job_service(job, session) - __add_notification_data(job, 'queued', session) - for job in filter(lambda job: job.status == 'running', jobs): - __checkout_job_service(job, session) - __add_notification_data(job, 'running', session) - for job in filter(lambda job: job.status == 'complete', jobs): - __add_notification_data(job, 'complete', session) - for job in filter(lambda job: job.status == 'failed', jobs): - __add_notification_data(job, 'failed', session) - for job in filter(lambda job: job.status == 'canceling', jobs): - __remove_job_service(job) - session.commit() - Session.remove() - - -def __add_notification_data(job, notified_on_status, session): - # checks if user wants any notifications at all - if (job.user.setting_job_status_mail_notifications == 'none'): - return - # checks if user wants only notification on completed jobs - elif (job.user.setting_job_status_mail_notifications == 'end' - and notified_on_status != 'complete'): - return - else: - # check if a job already has associated NotificationData - notification_exists = len(job.notification_data) - # create notification_data for current job if there is none - if (notification_exists == 0): - notification_data = NotificationData(job_id=job.id) - session.add(notification_data) - # If no commit job will have no NotificationData - session.commit() - if (job.notification_data[0].notified_on != notified_on_status): - notification_email_data = NotificationEmailData(job_id=job.id) - notification_email_data.notify_status = notified_on_status - notification_email_data.creation_date = datetime.utcnow() - job.notification_data[0].notified_on = notified_on_status - session.add(notification_email_data) - - -def __create_job_service(job): - job_dir = os.path.join(config.DATA_DIR, - str(job.user_id), - 'jobs', - str(job.id)) - cmd = '{} -i /files -o /files/output'.format(job.service) - if job.service == 'file-setup': - cmd += ' -f {}'.format(job.secure_filename) - cmd += ' --log-dir /files' - cmd += ' --zip [{}]_{}'.format(job.service, job.secure_filename) - cmd += ' ' + ' '.join(json.loads(job.service_args)) - service_args = {'command': cmd, - 'constraints': ['node.role==worker'], - 'labels': {'origin': 'nopaque', - 'type': 'service.{}'.format(job.service), - 'job_id': str(job.id)}, - 'mounts': [job_dir + ':/files:rw'], - 'name': 'job_{}'.format(job.id), - 'resources': docker.types.Resources( - cpu_reservation=job.n_cores * (10 ** 9), - mem_reservation=job.mem_mb * (10 ** 6)), - 'restart_policy': docker.types.RestartPolicy()} - service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/' - + job.service + ':' + job.service_version) - try: - service = docker_client.services.get(service_args['name']) - except docker.errors.NotFound: - pass - except docker.errors.DockerException: - return - else: - service.remove() - try: - docker_client.services.create(service_image, **service_args) - except docker.errors.DockerException: - job.status = 'failed' - else: - job.status = 'queued' - - -def __checkout_job_service(job, session): - service_name = 'job_{}'.format(job.id) - try: - service = 
docker_client.services.get(service_name) - except docker.errors.NotFound: - logging.error('__checkout_job_service({}): '.format(job.id) - + 'The service does not exist. ' - + '(status: {} -> failed)'.format(job.status)) - job.status = 'failed' - return - except docker.errors.DockerException: - return - service_tasks = service.tasks() - if not service_tasks: - return - task_state = service_tasks[0].get('Status').get('State') - if job.status == 'queued' and task_state != 'pending': - job.status = 'running' - elif (job.status == 'running' - and (task_state == 'complete' or task_state == 'failed')): - service.remove() - job.end_date = datetime.utcnow() - job.status = task_state - if task_state == 'complete': - results_dir = os.path.join(config.DATA_DIR, - str(job.user_id), - 'jobs', - str(job.id), - 'output') - results = filter(lambda x: x.endswith('.zip'), - os.listdir(results_dir)) - for result in results: - job_result = JobResult(dir=results_dir, - filename=result, - job_id=job.id) - session.add(job_result) - - -def __remove_job_service(job): - service_name = 'job_{}'.format(job.id) - try: - service = docker_client.services.get(service_name) - except docker.errors.NotFound: - job.status = 'canceled' - except docker.errors.DockerException: - return - else: - service.update(mounts=None) - service.remove() diff --git a/daemon/app/tasks/libnotify/__init__.py b/daemon/app/tasks/libnotify/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/daemon/app/tasks/libnotify/notification.py b/daemon/app/tasks/libnotify/notification.py deleted file mode 100644 index 488471c3..00000000 --- a/daemon/app/tasks/libnotify/notification.py +++ /dev/null @@ -1,28 +0,0 @@ -from email.message import EmailMessage - - -class Notification(EmailMessage): - """docstring for Email.""" - - def set_notification_content(self, - subject_template, - subject_template_values_dict, - body_txt_template_path, - body_html_template_path, - body_template_values_dict): - # Create subject with subject_template_values_dict - self['subject'] = subject_template.format( - **subject_template_values_dict) - # Open template files and insert values from body_template_values_dict - with open(body_txt_template_path) as nfile: - self.body = nfile.read().format(**body_template_values_dict) - with open(body_html_template_path) as nfile: - self.html = nfile.read().format(**body_template_values_dict) - # Set txt of email - self.set_content(self.body) - # Set html alternative - self.add_alternative(self.html, subtype='html') - - def set_addresses(self, sender, recipient): - self['From'] = sender - self['to'] = recipient diff --git a/daemon/app/tasks/libnotify/service.py b/daemon/app/tasks/libnotify/service.py deleted file mode 100644 index 633fb386..00000000 --- a/daemon/app/tasks/libnotify/service.py +++ /dev/null @@ -1,16 +0,0 @@ -class NotificationService: - """This is a nopaque notifcation service object.""" - - def __init__(self, smtp): - # Bool to show if the mail server stoped sending mails due to exceeding - # its sending limit - self.mail_limit_exceeded = False - # Holds due to an error unsent email notifications - self.not_sent = {} - self.smtp = smtp - - def send(self, email): - self.smtp.send_message(email) - - def quit(self): - self.smtp.quit() diff --git a/daemon/app/tasks/libnotify/templates/notification.html b/daemon/app/tasks/libnotify/templates/notification.html deleted file mode 100644 index e2edfe75..00000000 --- a/daemon/app/tasks/libnotify/templates/notification.html +++ /dev/null @@ -1,15 +0,0 @@ - - -

-Dear {username},
-The status of your Job/Corpus({id}) with the title "{title}" has changed!
-It is now {status}!
-Time of this status update was: {time} UTC
-You can access your Job/Corpus here: {url}
-Kind regards!
-Your nopaque team

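For reference, the retired daemon rendered these placeholder templates with plain str.format() rather than Jinja2, as notification.py above and notify.py below show. Roughly, with purely illustrative values:

    # The real dict is assembled in notify.py from a NotificationEmailData row.
    values = {'username': 'jdoe', 'id': 42, 'title': 'My first Job',
              'status': 'complete', 'time': '2020-11-17 10:16:40',
              'url': 'http://localhost/jobs/42'}
    with open('libnotify/templates/notification.txt') as template_file:
        body = template_file.read().format(**values)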
    - - diff --git a/daemon/app/tasks/libnotify/templates/notification.txt b/daemon/app/tasks/libnotify/templates/notification.txt deleted file mode 100644 index 0e221c54..00000000 --- a/daemon/app/tasks/libnotify/templates/notification.txt +++ /dev/null @@ -1,10 +0,0 @@ -Dear {username}, - -The status of your Job/Corpus({id}) with the title "{title}" has changed! -It is now {status}! -Time of this status update was: {time} UTC - -You can access your Job/Corpus here: {url} - -Kind regards! -Your nopaque team \ No newline at end of file diff --git a/daemon/app/tasks/notify.py b/daemon/app/tasks/notify.py deleted file mode 100644 index 5d3d23f3..00000000 --- a/daemon/app/tasks/notify.py +++ /dev/null @@ -1,111 +0,0 @@ -from sqlalchemy import asc -from .libnotify.notification import Notification -from .libnotify.service import NotificationService -from .. import configuration as config -from .. import Session -from ..decorators import background -from ..models import NotificationEmailData -import logging -import os -import smtplib - - -ROOT_DIR = os.path.abspath(os.path.dirname(__file__)) - - -@background -def notify(): - session = Session() - if config.SMTP_USE_SSL: - smtp = smtplib.SMTP_SSL(host=config.SMTP_SERVER, port=config.SMTP_PORT) - else: - smtp = smtplib.SMTP(host=config.SMTP_SERVER, port=config.SMTP_PORT) - if config.SMTP_USE_TLS: - smtp.starttls() - try: - smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD) - except smtplib.SMTPHeloError: - logging.warning('The server didn’t reply properly to the HELO ' - 'greeting.') - return - except smtplib.SMTPAuthenticationError as e: - logging.warning('The server didn’t accept the username/password ' - 'combination.') - logging.warning(e) - return - except smtplib.SMTPNotSupportedError: - logging.warning('The AUTH command is not supported by the server.') - return - except smtplib.SMTPException: - logging.warning('No suitable authentication method was found.') - return - notification_service = NotificationService(smtp) - # create notifications (content, recipient etc.) 
- notifications = __create_mail_notifications(notification_service, session) - # only login and send mails if there are any notifications - if (len(notifications) > 0): - # combine new and unsent notifications - notifications.update(notification_service.not_sent) - # send all notifications - __send_mail_notifications(notifications, notification_service) - # remove unsent notifications because they have been sent now - # but only if mail limit has not been exceeded - if (notification_service.mail_limit_exceeded is not True): - notification_service.not_sent = {} - smtp.quit() - Session.remove() - - -# Email notification functions -def __create_mail_notifications(notification_service, session): - notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all() # noqa - notifications = {} - for data in notification_email_data: - notification = Notification() - notification.set_addresses(config.SMTP_DEFAULT_SENDER, - data.job.user.email) - subject_template = ('[nopaque] Status update for your Job/Corpora: ' - '{title}!') - subject_template_values_dict = {'title': data.job.title} - url = '{}://{}/{}/{}'.format(config.PROTOCOL, - config.DOMAIN, - 'jobs', - data.job.id) - body_template_values_dict = {'username': data.job.user.username, - 'id': data.job.id, - 'title': data.job.title, - 'status': data.notify_status, - 'time': data.creation_date, - 'url': url} - txt_tmplt = os.path.join(ROOT_DIR, - 'libnotify/templates/notification.txt') - html_tmplt = os.path.join(ROOT_DIR, - 'libnotify/templates/notification.html') - notification.set_notification_content(subject_template, - subject_template_values_dict, - txt_tmplt, - html_tmplt, - body_template_values_dict) - notifications[data.job.id] = notification - # Using a dictionary for notifications avoids sending multiple mails - # if the status of a job changes in a few seconds. The user will not - # get swamped with mails for queued, running and complete if those - # happen in in a few seconds. Only the last update will be sent. - # This depends on the sleep time interval though. 
- session.delete(data) - session.commit() - return notifications - - -def __send_mail_notifications(notifications, notification_service): - for key, notification in notifications.items(): - try: - notification_service.send(notification) - notification_service.mail_limit_exceeded = False - except Exception: - # Adds notifications to unsent if mail server exceded limit for - # consecutive mail sending - logging.warning('limit') - notification_service.not_sent[key] = notification - notification_service.mail_limit_exceeded = True - notification_service.not_sent.update(notifications) diff --git a/daemon/boot.sh b/daemon/boot.sh deleted file mode 100755 index 53127dd0..00000000 --- a/daemon/boot.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -source venv/bin/activate -python nopaqued.py diff --git a/daemon/config.py b/daemon/config.py deleted file mode 100644 index 8729b563..00000000 --- a/daemon/config.py +++ /dev/null @@ -1,71 +0,0 @@ -import logging -import os - - -ROOT_DIR = os.path.abspath(os.path.dirname(__file__)) - - -class Config: - ''' # Email # ''' - SMTP_DEFAULT_SENDER = os.environ.get('NOPAQUE_SMTP_DEFAULT_SENDER') - SMTP_PASSWORD = os.environ.get('NOPAQUE_SMTP_PASSWORD') - SMTP_PORT = int(os.environ.get('NOPAQUE_SMTP_PORT')) - SMTP_SERVER = os.environ.get('NOPAQUE_SMTP_SERVER') - SMTP_USERNAME = os.environ.get('NOPAQUE_SMTP_USERNAME') - SMTP_USE_SSL = os.environ.get( - 'NOPAQUE_SMTP_USE_SSL', 'false').lower() == 'true' - SMTP_USE_TLS = os.environ.get( - 'NOPAQUE_SMTP_USE_TLS', 'false').lower() == 'true' - - ''' # General # ''' - DATA_DIR = os.environ.get('NOPAQUE_DATA_DIR', '/mnt/nopaque') - DOMAIN = os.environ.get('NOPAQUE_DOMAIN', 'localhost') - PROTOCOL = os.environ.get('NOPAQUE_PROTOCOL', 'http') - SECRET_KEY = os.environ.get('NOPAQUE_SECRET_KEY', 'hard to guess string') - - ''' # Logging # ''' - LOG_DATE_FORMAT = os.environ.get('NOPAQUE_LOG_DATE_FORMAT', - '%Y-%m-%d %H:%M:%S') - LOG_FILE = os.environ.get('NOPAQUED_LOG_FILE', - os.path.join(ROOT_DIR, 'nopaqued.log')) - LOG_FORMAT = os.environ.get( - 'NOPAQUE_LOG_FORMAT', - '[%(asctime)s] %(levelname)s in ' - '%(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s' - ) - LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL', 'WARNING') - - @classmethod - def init(cls): - # Set up logging according to the corresponding (LOG_*) variables - logging.basicConfig(datefmt=cls.LOG_DATE_FORMAT, - filename=cls.LOG_FILE, - format=cls.LOG_FORMAT, - level=cls.LOG_LEVEL) - - -class DevelopmentConfig(Config): - ''' # Database # ''' - SQLALCHEMY_DATABASE_URI = os.environ.get( - 'NOPAQUE_DEV_DATABASE_URL', - 'sqlite:///' + os.path.join(ROOT_DIR, 'data-dev.sqlite') - ) - - -class ProductionConfig(Config): - ''' # Database # ''' - SQLALCHEMY_DATABASE_URI = os.environ.get( - 'NOPAQUE_DATABASE_URL', - 'sqlite:///' + os.path.join(ROOT_DIR, 'data.sqlite') - ) - - -class TestingConfig(Config): - ''' # Database # ''' - SQLALCHEMY_DATABASE_URI = os.environ.get( - 'NOPAQUE_TEST_DATABASE_URL', 'sqlite://') - - -config = {'development': DevelopmentConfig, - 'production': ProductionConfig, - 'testing': TestingConfig} diff --git a/daemon/nopaqued.py b/daemon/nopaqued.py deleted file mode 100644 index 7fbb79dc..00000000 --- a/daemon/nopaqued.py +++ /dev/null @@ -1,13 +0,0 @@ -from dotenv import load_dotenv -from app import run -import os - - -# Load environment variables -DOTENV_FILE = os.path.join(os.path.dirname(__file__), '.env') -if os.path.exists(DOTENV_FILE): - load_dotenv(DOTENV_FILE) - - -if __name__ == '__main__': - run() diff --git 
a/daemon/requirements.txt b/daemon/requirements.txt deleted file mode 100644 index de767e32..00000000 --- a/daemon/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -docker -psycopg2 -python-dotenv -SQLAlchemy diff --git a/web/boot.sh b/web/boot.sh index d6536518..836e91ab 100755 --- a/web/boot.sh +++ b/web/boot.sh @@ -1,7 +1,7 @@ #!/bin/bash if [[ "${NOPAQUE_DAEMON_ENABLED}" == "True" ]]; then - echo "Starting nopaque daemon..." + echo "INFO Starting nopaque daemon process..." ./nopaque-daemon.sh & fi @@ -13,7 +13,7 @@ if [[ "${#}" -eq 0 ]]; then if [[ "${?}" == "0" ]]; then break fi - echo Deploy command failed, retrying in 5 secs... + echo "Deploy command failed, retrying in 5 secs..." sleep 5 done python nopaque.py From ba7789224bf241e642d3c05d16f8ac92b395f00b Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Thu, 19 Nov 2020 09:41:22 +0100 Subject: [PATCH 13/52] Create send_notification function --- web/app/models.py | 3 ++- web/app/tasks/job_utils.py | 32 ++++++++++++++++---------------- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/web/app/models.py b/web/app/models.py index e912bf17..4dd9dba9 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -140,7 +140,8 @@ class User(UserMixin, db.Model): @property def path(self): - return os.path.join(current_app.config['NOPAQUE_DATA_DIR'], str(self.id)) + return os.path.join(current_app.config['NOPAQUE_DATA_DIR'], + str(self.id)) @property def password(self): diff --git a/web/app/tasks/job_utils.py b/web/app/tasks/job_utils.py index 7a4e2109..d4f845fd 100644 --- a/web/app/tasks/job_utils.py +++ b/web/app/tasks/job_utils.py @@ -41,13 +41,7 @@ def create_job_service(job): else: job.status = 'queued' finally: - msg = create_message( - job.creator.email, - 'Status update for your Job "{}"'.format(job.title), - 'tasks/email/notification', - job=job - ) - mail.send(msg) + send_notification(job) def checkout_job_service(job): @@ -85,8 +79,6 @@ def checkout_job_service(job): + 'Details: {}'.format(e)) return else: - job.end_date = datetime.utcnow() - job.status = task_state if task_state == 'complete': job_results_dir = os.path.join(job.path, 'output') job_results = filter(lambda x: x.endswith('.zip'), @@ -94,14 +86,10 @@ def checkout_job_service(job): for job_result in job_results: job_result = JobResult(filename=job_result, job=job) db.session.add(job_result) + job.end_date = datetime.utcnow() + job.status = task_state finally: - msg = create_message( - job.creator.email, - 'Status update for your Job "{}"'.format(job.title), - 'tasks/email/notification', - job=job - ) - mail.send(msg) + send_notification(job) def remove_job_service(job): @@ -134,3 +122,15 @@ def remove_job_service(job): logging.error('Remove "{}" service raised '.format(service_name) # noqa + '[docker-APIError] The server returned an error. 
' # noqa + 'Details: {}'.format(e)) + + +def send_notification(job): + if job.creator.setting_job_status_mail_notifications == 'none': + return + if (job.creator.setting_job_status_mail_notifications == 'end' + and job.status not in ['complete', 'failed']): + return + msg = create_message(job.creator.email, + 'Status update for your Job "{}"'.format(job.title), + 'tasks/email/notification', job=job) + mail.send(msg) From b606710edf96fae44b2b980125e330285df4ab84 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Thu, 19 Nov 2020 12:29:40 +0100 Subject: [PATCH 14/52] Do not activate the python venv, everytime you execute daemon tasks --- web/boot.sh | 4 ++-- web/nopaque-daemon.sh | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/web/boot.sh b/web/boot.sh index 836e91ab..28b5013e 100755 --- a/web/boot.sh +++ b/web/boot.sh @@ -1,12 +1,12 @@ #!/bin/bash +source venv/bin/activate + if [[ "${NOPAQUE_DAEMON_ENABLED}" == "True" ]]; then echo "INFO Starting nopaque daemon process..." ./nopaque-daemon.sh & fi -source venv/bin/activate - if [[ "${#}" -eq 0 ]]; then while true; do flask deploy diff --git a/web/nopaque-daemon.sh b/web/nopaque-daemon.sh index 7a903646..bf2262dc 100755 --- a/web/nopaque-daemon.sh +++ b/web/nopaque-daemon.sh @@ -1,6 +1,7 @@ #!/bin/bash -source venv/bin/activate +# The nopaque daemon is essentially just a loop in which the daemon tasks are +# periodically executed while true; do flask tasks sleep 10 From 66887f21b84b48732b2a7b07e49e2a9509a14d4c Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Thu, 19 Nov 2020 12:30:13 +0100 Subject: [PATCH 15/52] Add config variables to generate url outside of request contexts --- web/config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/web/config.py b/web/config.py index 97e07697..7740ef96 100644 --- a/web/config.py +++ b/web/config.py @@ -8,7 +8,9 @@ ROOT_DIR = os.path.abspath(os.path.dirname(__file__)) class Config: ''' # Flask # ''' + PREFERRED_URL_SCHEME = os.environ.get('PREFERRED_URL_SCHEME', 'http') SECRET_KEY = os.environ.get('SECRET_KEY', 'hard to guess string') + SERVER_NAME = os.environ.get('SERVER_NAME') SESSION_COOKIE_SECURE = \ os.environ.get('SESSION_COOKIE_SECURE', 'false').lower() == 'true' From ab5db4ae83402a32b8a0340bd46587e079396365 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Thu, 19 Nov 2020 12:31:29 +0100 Subject: [PATCH 16/52] Codestyle update (less use of #noqa) --- web/app/tasks/corpus_utils.py | 105 +++++++++++++++++++++------------- web/app/tasks/job_utils.py | 72 ++++++++++++++--------- 2 files changed, 110 insertions(+), 67 deletions(-) diff --git a/web/app/tasks/corpus_utils.py b/web/app/tasks/corpus_utils.py index dd37ad62..52809e5f 100644 --- a/web/app/tasks/corpus_utils.py +++ b/web/app/tasks/corpus_utils.py @@ -25,13 +25,16 @@ def create_build_corpus_service(corpus): 'name': 'build-corpus_{}'.format(corpus.id), 'restart_policy': docker.types.RestartPolicy() } - service_image = 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' # noqa + service_image = \ + 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' try: docker_client.services.create(service_image, **service_kwargs) except docker.errors.APIError as e: - logging.error('Create "{}" service raised '.format(service_kwargs['name']) # noqa - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Create "{}" service raised '.format(service_kwargs['name']) + + '"docker.errors.APIError" The server returned an error. 
' + + 'Details: {}'.format(e) + ) else: corpus.status = 'queued' @@ -41,18 +44,24 @@ def checkout_build_corpus_service(corpus): try: service = docker_client.services.get(service_name) except docker.errors.NotFound: - logging.error('Get "{}" service raised '.format(service_name) - + '[docker-NotFound] The service does not exist. ' - + '(corpus.status: {} -> failed)'.format(corpus.status)) + logging.error( + 'Get "{}" service raised '.format(service_name) + + '"docker.errors.NotFound" The service does not exist. ' + + '(corpus.status: {} -> failed)'.format(corpus.status) + ) corpus.status = 'failed' except docker.errors.APIError as e: - logging.error('Get "{}" service raised '.format(service_name) - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Get "{}" service raised '.format(service_name) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) except docker.errors.InvalidVersion: - logging.error('Get "{}" service raised '.format(service_name) - + '[docker-InvalidVersion] One of the arguments is ' - + 'not supported with the current API version.') + logging.error( + 'Get "{}" service raised '.format(service_name) + + '"docker.errors.InvalidVersion" One of the arguments is ' + + 'not supported with the current API version.' + ) else: service_tasks = service.tasks() if not service_tasks: @@ -60,13 +69,16 @@ def checkout_build_corpus_service(corpus): task_state = service_tasks[0].get('Status').get('State') if corpus.status == 'queued' and task_state != 'pending': corpus.status = 'running' - elif corpus.status == 'running' and task_state in ['complete', 'failed']: # noqa + elif (corpus.status == 'running' + and task_state in ['complete', 'failed']): try: service.remove() except docker.errors.APIError as e: - logging.error('Remove "{}" service raised '.format(service_name) # noqa - + '[docker-APIError] The server returned an error. ' # noqa - + 'Details: {}'.format(e)) + logging.error( + 'Remove "{}" service raised '.format(service_name) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return else: corpus.status = 'prepared' if task_state == 'complete' \ @@ -84,7 +96,8 @@ def create_cqpserver_container(corpus): 'name': 'cqpserver_{}'.format(corpus.id), 'network': 'nopaque_default' } - container_image = 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' # noqa + container_image = \ + 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' # Check if a cqpserver container already exists. If this is the case, # remove it and create a new one try: @@ -92,35 +105,45 @@ def create_cqpserver_container(corpus): except docker.errors.NotFound: pass except docker.errors.APIError as e: - logging.error('Get "{}" container raised '.format(container_kwargs['name']) - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Get "{}" container raised '.format(container_kwargs['name']) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return else: try: container.remove(force=True) except docker.errors.APIError as e: - logging.error('Remove "{}" container raised '.format(container_kwargs['name']) - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Remove "{}" container raised '.format(container_kwargs['name']) # noqa + + '"docker.errors.APIError" The server returned an error. 
' + + 'Details: {}'.format(e) + ) return try: docker_client.containers.run(container_image, **container_kwargs) except docker.errors.ContainerError: # This case should not occur, because detach is True. - logging.error('Run "{}" container raised '.format(container_kwargs['name']) - + '[docker-ContainerError] The container exits with a ' - + 'non-zero exit code and detach is False.') + logging.error( + 'Run "{}" container raised '.format(container_kwargs['name']) + + '"docker.errors.ContainerError" The container exits with a ' + + 'non-zero exit code and detach is False.' + ) corpus.status = 'failed' except docker.errors.ImageNotFound: - logging.error('Run "{}" container raised '.format(container_kwargs['name']) - + '[docker-ImageNotFound] The specified image does not ' - + 'exist.') + logging.error( + 'Run "{}" container raised '.format(container_kwargs['name']) + + '"docker.errors.ImageNotFound" The specified image does not ' + + 'exist.' + ) corpus.status = 'failed' except docker.errors.APIError as e: - logging.error('Run "{}" container raised '.format(container_kwargs['name']) - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Run "{}" container raised '.format(container_kwargs['name']) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) else: corpus.status = 'analysing' @@ -132,16 +155,20 @@ def remove_cqpserver_container(corpus): except docker.errors.NotFound: pass except docker.errors.APIError as e: - logging.error('Get "{}" container raised '.format(container_name) - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Get "{}" container raised '.format(container_name) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return else: try: container.remove(force=True) except docker.errors.APIError as e: - logging.error('Remove "{}" container raised '.format(container_name) - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Remove "{}" container raised '.format(container_name) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return corpus.status = 'prepared' diff --git a/web/app/tasks/job_utils.py b/web/app/tasks/job_utils.py index d4f845fd..19a75b81 100644 --- a/web/app/tasks/job_utils.py +++ b/web/app/tasks/job_utils.py @@ -29,14 +29,16 @@ def create_job_service(job): mem_reservation=job.mem_mb * (10 ** 6) ), 'restart_policy': docker.types.RestartPolicy()} - service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/' - + job.service + ':' + job.service_version) + service_image = 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/{}:{}'.format( + job.service, job.service_version) try: docker_client.services.create(service_image, **service_kwargs) except docker.errors.APIError as e: - logging.error('Create "{}" service raised '.format(service_kwargs['name']) # noqa - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Create "{}" service raised '.format(service_kwargs['name']) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return else: job.status = 'queued' @@ -50,18 +52,22 @@ def checkout_job_service(job): service = docker_client.services.get(service_name) except docker.errors.NotFound: logging.error('Get "{}" service raised '.format(service_name) - + '[docker-NotFound] The service does not exist. 
' + + '"docker.errors.NotFound" The service does not exist. ' + '(job.status: {} -> failed)'.format(job.status)) job.status = 'failed' except docker.errors.APIError as e: - logging.error('Get "{}" service raised '.format(service_name) - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Get "{}" service raised '.format(service_name) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return except docker.errors.InvalidVersion: - logging.error('Get "{}" service raised '.format(service_name) - + '[docker-InvalidVersion] One of the arguments is ' - + 'not supported with the current API version.') + logging.error( + 'Get "{}" service raised '.format(service_name) + + '"docker.errors.InvalidVersion" One of the arguments is ' + + 'not supported with the current API version.' + ) return else: service_tasks = service.tasks() @@ -70,13 +76,15 @@ def checkout_job_service(job): task_state = service_tasks[0].get('Status').get('State') if job.status == 'queued' and task_state != 'pending': job.status = 'running' - elif job.status == 'running' and task_state in ['complete', 'failed']: # noqa + elif job.status == 'running' and task_state in ['complete', 'failed']: try: service.remove() except docker.errors.APIError as e: - logging.error('Remove "{}" service raised '.format(service_name) # noqa - + '[docker-APIError] The server returned an error. ' # noqa - + 'Details: {}'.format(e)) + logging.error( + 'Remove "{}" service raised '.format(service_name) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return else: if task_state == 'complete': @@ -99,29 +107,37 @@ def remove_job_service(job): except docker.errors.NotFound: job.status = 'canceled' except docker.errors.APIError as e: - logging.error('Get "{}" service raised '.format(service_name) - + '[docker-APIError] The server returned an error. ' - + 'Details: {}'.format(e)) + logging.error( + 'Get "{}" service raised '.format(service_name) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return except docker.errors.InvalidVersion: - logging.error('Get "{}" service raised '.format(service_name) - + '[docker-InvalidVersion] One of the arguments is ' - + 'not supported with the current API version.') + logging.error( + 'Get "{}" service raised '.format(service_name) + + '"docker.errors.InvalidVersion" One of the arguments is ' + + 'not supported with the current API version.' + ) return else: try: service.update(mounts=None) except docker.errors.APIError as e: - logging.error('Update "{}" service raised '.format(service_name) # noqa - + '[docker-APIError] The server returned an error. ' # noqa - + 'Details: {}'.format(e)) + logging.error( + 'Update "{}" service raised '.format(service_name) + + '"docker.errors.APIError" The server returned an error. ' + + 'Details: {}'.format(e) + ) return try: service.remove() except docker.errors.APIError as e: - logging.error('Remove "{}" service raised '.format(service_name) # noqa - + '[docker-APIError] The server returned an error. ' # noqa - + 'Details: {}'.format(e)) + logging.error( + 'Remove "{}" service raised '.format(service_name) + + '"docker.errors.APIError" The server returned an error. 
' + + 'Details: {}'.format(e) + ) def send_notification(job): From b786bbdfb17fe1a7a703693ca89aa3e1f50c753d Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Thu, 19 Nov 2020 15:20:18 +0100 Subject: [PATCH 17/52] Change default value of NOPAQUE_DAEMON_ENABLED from False to True --- .env.tpl | 2 +- web/boot.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.env.tpl b/.env.tpl index 25884399..64a48620 100644 --- a/.env.tpl +++ b/.env.tpl @@ -121,7 +121,7 @@ NOPAQUE_ADMIN= # NOPAQUE_DATA_DIR= # CHOOSE ONE: False, True -# DEFAULT: False +# DEFAULT: True # NOPAQUE_DAEMON_ENABLED= # The hostname or IP address for the server to listen on. diff --git a/web/boot.sh b/web/boot.sh index 28b5013e..23edce22 100755 --- a/web/boot.sh +++ b/web/boot.sh @@ -2,7 +2,7 @@ source venv/bin/activate -if [[ "${NOPAQUE_DAEMON_ENABLED}" == "True" ]]; then +if [[ "${NOPAQUE_DAEMON_ENABLED:-True}" == "True" ]]; then echo "INFO Starting nopaque daemon process..." ./nopaque-daemon.sh & fi From 8cc748de94371b35364659b00c0a795f6540dfcd Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Thu, 19 Nov 2020 15:20:49 +0100 Subject: [PATCH 18/52] add stuff to enable instance scaling --- README.md | 8 +++++++- docker-compose.scale.yml | 6 ++++++ 2 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 docker-compose.scale.yml diff --git a/README.md b/README.md index 8c233226..f991459a 100644 --- a/README.md +++ b/README.md @@ -56,6 +56,12 @@ username@hostname:~$ docker-compose build ``` bash # Create log files touch nopaque.log nopaqued.log -# For background execution add the -d flag and to scale the app, add --scale web= +# For background execution add the -d flag username@hostname:~$ docker-compose up +# To scale your app use +username@hostname:~$ docker-compose -f docker-compose.yml \ + -f docker-compose.override.yml + -f docker-compose.scale.yml + up + -d --no-recreate --scale nopaque= ``` diff --git a/docker-compose.scale.yml b/docker-compose.scale.yml new file mode 100644 index 00000000..f13b3550 --- /dev/null +++ b/docker-compose.scale.yml @@ -0,0 +1,6 @@ +version: "3.5" + +services: + nopaque: + environment: + - NOPAQUE_DAEMON_ENABLED=False From c3c3b70030f7007d30adcf411147bef528c4e6dc Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Sun, 22 Nov 2020 16:01:59 +0100 Subject: [PATCH 19/52] Change the verification, it a user is allowed to view a corpus_file --- web/app/corpora/views.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/web/app/corpora/views.py b/web/app/corpora/views.py index 13874243..95f8681d 100644 --- a/web/app/corpora/views.py +++ b/web/app/corpora/views.py @@ -242,12 +242,12 @@ def download_corpus_file(corpus_id, corpus_file_id): methods=['GET', 'POST']) @login_required def corpus_file(corpus_id, corpus_file_id): - corpus_file = CorpusFile.query.get_or_404(corpus_file_id) - if corpus_file.corpus_id != corpus_id: - abort(404) - if not (corpus_file.corpus.creator == current_user - or current_user.is_administrator()): + corpus = Corpus.query.get_or_404(corpus_id) + if not (corpus.creator == current_user or current_user.is_administrator()): abort(403) + corpus_file = CorpusFile.query.get_or_404(corpus_file_id) + if corpus_file.corpus != corpus: + abort(404) form = EditCorpusFileForm(prefix='edit-corpus-file-form') if form.validate_on_submit(): corpus_file.address = form.address.data @@ -292,9 +292,9 @@ def prepare_corpus(corpus_id): abort(403) if corpus.files.all(): tasks.build_corpus(corpus_id) - flash('Corpus "{}" has been marked to 
get build!', 'corpus') + flash('Corpus "{}" has been marked to get build!'.format(corpus.title), 'corpus') # noqa else: - flash('Can not build corpus "{}": No corpus file(s)!', 'error') + flash('Can not build corpus "{}": No corpus file(s)!'.format(corpus.title), 'error') # noqa return redirect(url_for('.corpus', corpus_id=corpus_id)) From d1b39449e800136df7b4d06bef404deff2674b7b Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Sun, 22 Nov 2020 16:02:51 +0100 Subject: [PATCH 20/52] Fix service icons in job lists, Readd publishing year in corpus_file lists --- web/app/static/js/nopaque.lists.js | 26 ++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/web/app/static/js/nopaque.lists.js b/web/app/static/js/nopaque.lists.js index 8d7b6159..c907f5be 100644 --- a/web/app/static/js/nopaque.lists.js +++ b/web/app/static/js/nopaque.lists.js @@ -14,7 +14,7 @@ class RessourceList extends List { _init(ressources) { this.clear(); this._add(Object.values(ressources)); - this.sort("creation_date", {order: "desc"}); + this.sort("id", {order: "desc"}); } @@ -84,7 +84,9 @@ RessourceList.dataMappers = { CorpusFile: corpus_file => ({ author: corpus_file.author, filename: corpus_file.filename, + id: corpus_file.id, link: `${corpus_file.corpus_id}/files/${corpus_file.id}`, + "publishing-year": corpus_file.publishing_year, title: corpus_file.title, title1: corpus_file.title, "delete-link": `/corpora/${corpus_file.corpus_id}/files/${corpus_file.id}/delete`, @@ -98,7 +100,7 @@ RessourceList.dataMappers = { description: job.description, id: job.id, link: `/jobs/${job.id}`, - service: job.service, + service: job.service.name, status: job.status, title: job.title, title1: job.title, @@ -143,22 +145,7 @@ RessourceList.dataMappers = { RessourceList.options = { // common list.js options for 5 rows per page etc. - common: { - page: 5, - pagination: [ - { - name: "paginationTop", - paginationClass: "paginationTop", - innerWindow: 4, - outerWindow: 1 - }, - { - paginationClass: "paginationBottom", - innerWindow: 4, - outerWindow: 1, - }, - ], - }, + common: {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}, // extended list.js options for 10 rows per page etc. extended: { page: 10, @@ -238,6 +225,7 @@ RessourceList.options = { +
    @@ -265,8 +253,10 @@ RessourceList.options = { valueNames: [ "author", "filename", + "publishing-year", "title", "title1", + {data: ["id"]}, {name: "delete-link", attr: "href"}, {name: "delete-modal-trigger", attr: "data-target"}, {name: "delete-modal", attr: "id"}, From ece2d450d77050e858ac3c2b8a29d7e686ab9f7f Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Sun, 22 Nov 2020 16:03:19 +0100 Subject: [PATCH 21/52] Rename label of build corpus services --- web/app/tasks/corpus_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/app/tasks/corpus_utils.py b/web/app/tasks/corpus_utils.py index 52809e5f..2167da46 100644 --- a/web/app/tasks/corpus_utils.py +++ b/web/app/tasks/corpus_utils.py @@ -17,7 +17,7 @@ def create_build_corpus_service(corpus): 'command': 'docker-entrypoint.sh build-corpus', 'constraints': ['node.role==worker'], 'labels': {'origin': 'nopaque', - 'type': 'corpus.prepare', + 'type': 'corpus.build', 'corpus_id': str(corpus.id)}, 'mounts': [corpus_file + ':/root/files/corpus.vrt:ro', corpus_data_dir + ':/corpora/data:rw', From 501871f9a782f53affac2ff80c4ef5e833e81253 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Sun, 22 Nov 2020 16:03:41 +0100 Subject: [PATCH 22/52] Remove double paginations --- web/app/templates/admin/user.html.j2 | 6 ++---- web/app/templates/corpora/corpus.html.j2 | 6 +++--- web/app/templates/jobs/job.html.j2 | 3 +-- web/app/templates/main/dashboard.html.j2 | 9 +++------ 4 files changed, 9 insertions(+), 15 deletions(-) diff --git a/web/app/templates/admin/user.html.j2 b/web/app/templates/admin/user.html.j2 index 570b6dd3..97e3137a 100644 --- a/web/app/templates/admin/user.html.j2 +++ b/web/app/templates/admin/user.html.j2 @@ -45,7 +45,6 @@
    -
      @@ -60,7 +59,7 @@
      -
        +
          @@ -74,7 +73,6 @@ -
            @@ -89,7 +87,7 @@
            -
              +
                diff --git a/web/app/templates/corpora/corpus.html.j2 b/web/app/templates/corpora/corpus.html.j2 index c60b9e01..f89b1a22 100644 --- a/web/app/templates/corpora/corpus.html.j2 +++ b/web/app/templates/corpora/corpus.html.j2 @@ -82,13 +82,13 @@ -
                  + @@ -102,7 +102,7 @@ {% endif %}
                  Filename Author TitlePublishing year {# Actions #}
                  -
                    +
                      addAdd corpus file @@ -116,7 +116,7 @@
                      diff --git a/web/app/templates/main/dashboard.html.j2 b/web/app/templates/main/dashboard.html.j2 index 4336178a..56602f8c 100644 --- a/web/app/templates/main/dashboard.html.j2 +++ b/web/app/templates/main/dashboard.html.j2 @@ -29,7 +29,6 @@ -
                        @@ -44,7 +43,7 @@
                        -
                          +
                            import_exportImport Corpus @@ -60,7 +59,6 @@
                            -
                              @@ -84,7 +82,7 @@
                              -
                                +
                                  Add query resultfile_upload @@ -104,7 +102,6 @@
                                  -
                                    @@ -119,7 +116,7 @@
                                    -
                                      +

                                        addNew job

                                        From adac32e54f1d90c13035fa2afe2f4d865c4918a0 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Mon, 23 Nov 2020 08:47:55 +0100 Subject: [PATCH 23/52] Fix bug --- web/app/templates/modals/analysis_init.html.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/app/templates/modals/analysis_init.html.j2 b/web/app/templates/modals/analysis_init.html.j2 index b4f189cf..00ad8c0f 100644 --- a/web/app/templates/modals/analysis_init.html.j2 +++ b/web/app/templates/modals/analysis_init.html.j2 @@ -5,7 +5,7 @@

                                        If the loading takes to long or an error occured, click here to refresh your session or - go back! + go back!

                                        From a25cbf4bb03c3faec00e3144617340dbdabaf40f Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Mon, 23 Nov 2020 09:07:57 +0100 Subject: [PATCH 24/52] Add some more documentation to environment variables --- .env.tpl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.env.tpl b/.env.tpl index 64a48620..112e3591 100644 --- a/.env.tpl +++ b/.env.tpl @@ -125,12 +125,14 @@ NOPAQUE_ADMIN= # NOPAQUE_DAEMON_ENABLED= # The hostname or IP address for the server to listen on. -# HINT: To use a domain locally, add any names that should route to the app to your hosts file. # DEFAULT: 0.0.0.0 +# NOTES: To use a domain locally, add any names that should route to the app to your hosts file. +# If nopaque is running in a Docker container, you propably want to use the default value. # NOPAQUE_HOST= # The port number for the server to listen on. # DEFAULT: 5000 +# NOTE: If nopaque is running in a Docker container, you propably want to use the default value. # NOPAQUE_PORT= # transport://[userid:password]@hostname[:port]/[virtual_host] From 9dde839148e0bf15f96aba44c03bb8738caf756b Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Wed, 2 Dec 2020 14:25:46 +0100 Subject: [PATCH 25/52] Add migration for new database design --- web/migrations/versions/6c2227f1cc77_.py | 59 ++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 web/migrations/versions/6c2227f1cc77_.py diff --git a/web/migrations/versions/6c2227f1cc77_.py b/web/migrations/versions/6c2227f1cc77_.py new file mode 100644 index 00000000..9d58746e --- /dev/null +++ b/web/migrations/versions/6c2227f1cc77_.py @@ -0,0 +1,59 @@ +"""empty message + +Revision ID: 6c2227f1cc77 +Revises: befe5326787e +Create Date: 2020-12-02 08:50:45.880062 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '6c2227f1cc77' +down_revision = 'befe5326787e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('notification_data') + op.drop_table('notification_email_data') + op.drop_column('corpus_files', 'dir') + op.drop_column('job_inputs', 'dir') + op.drop_column('job_results', 'dir') + op.drop_column('jobs', 'secure_filename') + op.alter_column('roles', 'permissions', + existing_type=sa.BIGINT(), + type_=sa.Integer(), + existing_nullable=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('roles', 'permissions', + existing_type=sa.Integer(), + type_=sa.BIGINT(), + existing_nullable=True) + op.add_column('jobs', sa.Column('secure_filename', sa.VARCHAR(length=32), autoincrement=False, nullable=True)) + op.add_column('job_results', sa.Column('dir', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) + op.add_column('job_inputs', sa.Column('dir', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) + op.add_column('corpus_files', sa.Column('dir', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) + op.create_table('notification_email_data', + sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column('job_id', sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column('notify_status', sa.VARCHAR(length=16), autoincrement=False, nullable=True), + sa.Column('creation_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint(['job_id'], ['jobs.id'], name='notification_email_data_job_id_fkey'), + sa.PrimaryKeyConstraint('id', name='notification_email_data_pkey') + ) + op.create_table('notification_data', + sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column('job_id', sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column('notified_on', sa.VARCHAR(length=16), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint(['job_id'], ['jobs.id'], name='notification_data_job_id_fkey'), + sa.PrimaryKeyConstraint('id', name='notification_data_pkey') + ) + # ### end Alembic commands ### From 4c92fdfb6caa2ec6a1755c21fad0cecc9698f58c Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Wed, 2 Dec 2020 14:26:17 +0100 Subject: [PATCH 26/52] Fix file path management and download bugs --- web/app/corpora/views.py | 8 ++++---- web/app/jobs/views.py | 13 +++++-------- web/app/models.py | 5 +++-- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/web/app/corpora/views.py b/web/app/corpora/views.py index 95f8681d..3bf1d7e6 100644 --- a/web/app/corpora/views.py +++ b/web/app/corpora/views.py @@ -234,7 +234,7 @@ def download_corpus_file(corpus_id, corpus_file_id): or current_user.is_administrator()): abort(403) return send_from_directory(as_attachment=True, - directory=corpus_file.corpus.path, + directory=os.path.dirname(corpus_file.path), filename=corpus_file.filename) @@ -382,8 +382,7 @@ def inspect_query_result(query_result_id): inspect_display_options_form = InspectDisplayOptionsForm( prefix='inspect-display-options-form' ) - query_result_file_path = os.path.join(query_result.path, query_result.filename) # noqa - with open(query_result_file_path, 'r') as query_result_file: + with open(query_result.path, 'r') as query_result_file: query_result_file_content = json.load(query_result_file) return render_template('corpora/query_results/inspect.html.j2', query_result=query_result, @@ -413,5 +412,6 @@ def download_query_result(query_result_id): if not (query_result.creator == current_user or current_user.is_administrator()): abort(403) - return send_from_directory(as_attachment=True, directory=query_result.path, + return send_from_directory(as_attachment=True, + directory=os.path.dirname(query_result.path), filename=query_result.filename) diff --git a/web/app/jobs/views.py b/web/app/jobs/views.py index 739f153c..ffb5df15 100644 --- a/web/app/jobs/views.py +++ b/web/app/jobs/views.py @@ -5,6 +5,7 @@ from . import jobs from . 
import tasks from ..decorators import admin_required from ..models import Job, JobInput, JobResult +import os @jobs.route('/') @@ -32,14 +33,12 @@ def delete_job(job_id): @jobs.route('//inputs//download') @login_required def download_job_input(job_id, job_input_id): - job_input = JobInput.query.get_or_404(job_input_id) - if not job_input.job_id == job_id: - abort(404) + job_input = JobInput.query.filter(JobInput.job_id == job_id, JobInput.id == job_input_id).first_or_404() # noqa if not (job_input.job.creator == current_user or current_user.is_administrator()): abort(403) return send_from_directory(as_attachment=True, - directory=job_input.job.path, + directory=os.path.dirname(job_input.path), filename=job_input.filename) @@ -59,12 +58,10 @@ def restart(job_id): @jobs.route('//results//download') @login_required def download_job_result(job_id, job_result_id): - job_result = JobResult.query.get_or_404(job_result_id) - if not job_result.job_id == job_id: - abort(404) + job_result = JobResult.query.filter(JobResult.job_id == job_id, JobResult.id == job_result_id).first_or_404() # noqa if not (job_result.job.creator == current_user or current_user.is_administrator()): abort(403) return send_from_directory(as_attachment=True, - directory=job_result.job.path, + directory=os.path.dirname(job_result.path), filename=job_result.filename) diff --git a/web/app/models.py b/web/app/models.py index 4dd9dba9..4ca7095f 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -311,7 +311,7 @@ class JobResult(db.Model): @property def path(self): - return os.path.join(self.job.path, self.filename) + return os.path.join(self.job.path, 'output', self.filename) def __repr__(self): ''' @@ -561,7 +561,8 @@ class QueryResult(db.Model): @property def path(self): - return os.path.join(self.creator.path, 'query_results', str(self.id)) + return os.path.join( + self.creator.path, 'query_results', str(self.id), self.filename) def delete(self): shutil.rmtree(self.path, ignore_errors=True) From 29dbdc9f949c6ab6b226df68976484e5473353a3 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Thu, 3 Dec 2020 15:13:24 +0100 Subject: [PATCH 27/52] Add URL property to some models --- web/app/models.py | 51 ++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/web/app/models.py b/web/app/models.py index 4ca7095f..1fde6add 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -1,5 +1,5 @@ from datetime import datetime -from flask import current_app +from flask import current_app, url_for from flask_login import UserMixin, AnonymousUserMixin from itsdangerous import BadSignature, TimedJSONWebSignatureSerializer from time import sleep @@ -281,10 +281,20 @@ class JobInput(db.Model): # Fields filename = db.Column(db.String(255)) + @property + def download_url(self): + return url_for('job.download_job_input', job_id=self.job_id, + job_input_id=self.id) + @property def path(self): return os.path.join(self.job.path, self.filename) + @property + def url(self): + return url_for('jobs.job', job_id=self.job_id, + _anchor='job-{}-input-{}'.format(self.job_id, self.id)) + def __repr__(self): ''' String representation of the JobInput. For human readability. 
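The download views above all follow the same pattern: resolve the requested record with a single filtered query, check ownership, then hand send_from_directory() the containing directory (derived from the model's path property via os.path.dirname) plus the bare filename. A minimal, self-contained sketch of that flow, using an in-memory dict instead of the real JobResult model and the send_from_directory(directory=..., filename=...) call style used throughout these patches; the route, paths and data here are illustrative assumptions, not the exact nopaque code:

    import os

    from flask import Flask, abort, send_from_directory

    app = Flask(__name__)

    # Illustrative stand-in for JobResult rows; the real models build the path
    # from the creator's data directory, the job id and an 'output' subfolder.
    JOB_RESULTS = {
        (1, 1): {'filename': 'tokens.vrt',
                 'path': '/data/users/1/jobs/1/output/tokens.vrt'},
    }

    @app.route('/jobs/<int:job_id>/results/<int:job_result_id>/download')
    def download_job_result(job_id, job_result_id):
        # The real view expresses this lookup as one filtered query:
        #   JobResult.query.filter(JobResult.job_id == job_id,
        #                          JobResult.id == job_result_id).first_or_404()
        # (the ownership check against current_user is omitted in this sketch)
        job_result = JOB_RESULTS.get((job_id, job_result_id))
        if job_result is None:
            abort(404)
        # send_from_directory() wants the containing directory plus the bare
        # filename, so the directory is derived from the stored full path.
        return send_from_directory(directory=os.path.dirname(job_result['path']),
                                   filename=job_result['filename'],
                                   as_attachment=True)

The url and download_url properties added in the surrounding hunks then only need url_for() with the matching endpoint and ids, so to_dict() payloads and templates can link to such a route without rebuilding filesystem paths by hand.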
@@ -292,7 +302,9 @@ class JobInput(db.Model): return ''.format(self.filename) def to_dict(self): - return {'id': self.id, + return {'download_url': self.download_url, + 'url': self.url, + 'id': self.id, 'job_id': self.job_id, 'filename': self.filename} @@ -309,10 +321,20 @@ class JobResult(db.Model): # Fields filename = db.Column(db.String(255)) + @property + def download_url(self): + return url_for('job.download_job_result', job_id=self.job_id, + job_result_id=self.id) + @property def path(self): return os.path.join(self.job.path, 'output', self.filename) + @property + def url(self): + return url_for('jobs.job', job_id=self.job_id, + _anchor='job-{}-result-{}'.format(self.job_id, self.id)) + def __repr__(self): ''' String representation of the JobResult. For human readability. @@ -320,7 +342,9 @@ class JobResult(db.Model): return ''.format(self.filename) def to_dict(self): - return {'id': self.id, + return {'download_url': self.download_url, + 'url': self.url, + 'id': self.id, 'job_id': self.job_id, 'filename': self.filename} @@ -359,6 +383,10 @@ class Job(db.Model): def path(self): return os.path.join(self.creator.path, 'jobs', str(self.id)) + @property + def path(self): + return url_for('job.job', job_id=self.id) + def __repr__(self): ''' String representation of the Job. For human readability. @@ -395,7 +423,8 @@ class Job(db.Model): self.status = 'submitted' def to_dict(self): - return {'id': self.id, + return {'url': self.url, + 'id': self.id, 'user_id': self.user_id, 'creation_date': self.creation_date.timestamp(), 'description': self.description, @@ -435,10 +464,20 @@ class CorpusFile(db.Model): school = db.Column(db.String(255)) title = db.Column(db.String(255)) + @property + def download_url(self): + return url_for('corpora.download_corpus_file', + corpus_id=self.corpus_id, corpus_file_id=self.id) + @property def path(self): return os.path.join(self.corpus.path, self.filename) + @property + def url(self): + return url_for('corpora.corpus_file', corpus_id=self.corpus_id, + corpus_file_id=self.id) + def delete(self): try: os.remove(self.path) @@ -449,7 +488,9 @@ class CorpusFile(db.Model): self.corpus.status = 'unprepared' def to_dict(self): - return {'id': self.id, + return {'download_url': self.download_url, + 'url': self.url, + 'id': self.id, 'corpus_id': self.corpus_id, 'address': self.address, 'author': self.author, From a2102a48ca2ac0a0c9b6f3353ee82befcb52758e Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Fri, 4 Dec 2020 14:16:00 +0100 Subject: [PATCH 28/52] Add url properties to db models --- web/app/models.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/web/app/models.py b/web/app/models.py index 1fde6add..f22042d5 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -533,8 +533,13 @@ class Corpus(db.Model): def path(self): return os.path.join(self.creator.path, 'corpora', str(self.id)) + @property + def url(self): + return url_for('corpora.corpus', corpus_id=self.id) + def to_dict(self): - return {'id': self.id, + return {'url': self.url, + 'id': self.id, 'user_id': self.user_id, 'creation_date': self.creation_date.timestamp(), 'description': self.description, @@ -600,17 +605,28 @@ class QueryResult(db.Model): query_metadata = db.Column(db.JSON()) title = db.Column(db.String(32)) + @property + def download_url(self): + return url_for('corpora.download_query_result', + query_result_id=self.id) + @property def path(self): return os.path.join( self.creator.path, 'query_results', str(self.id), self.filename) + @property + 
def url(self): + return url_for('corpora.query_result', query_result_id=self.id) + def delete(self): shutil.rmtree(self.path, ignore_errors=True) db.session.delete(self) def to_dict(self): - return {'id': self.id, + return {'download_url': self.download_url, + 'url': self.url, + 'id': self.id, 'user_id': self.user_id, 'description': self.description, 'filename': self.filename, From 1883a9bc632f3ab22ce75ef630e61783646c6f2a Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Fri, 4 Dec 2020 14:16:11 +0100 Subject: [PATCH 29/52] Start redesign of RessourceLists --- web/app/static/js/nopaque.lists.new.js | 68 ++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 web/app/static/js/nopaque.lists.new.js diff --git a/web/app/static/js/nopaque.lists.new.js b/web/app/static/js/nopaque.lists.new.js new file mode 100644 index 00000000..564bae45 --- /dev/null +++ b/web/app/static/js/nopaque.lists.new.js @@ -0,0 +1,68 @@ +class RessourceList extends List { + constructor(idOrElement, options) { + super(idOrElement, {...RessourceList.options['default'], ...(options ? options : {})}); + } + + _init(ressources) { + this.clear(); + this._add(Object.values(ressources)); + this.sort("id", {order: "desc"}); + } + + + _update(patch) { + let item, pathArray; + + for (let operation of patch) { + /* + * '/{ressourceName}/{ressourceId}/...' -> ['{ressourceId}', ...] + * Example: '/jobs/1/status' -> ['1', 'status'] + */ + pathArray = operation.path.split("/").slice(2); + switch(operation.op) { + case "add": + this.add_handler([operation.value]); + break; + case "remove": + this.remove_handler(pathArray[0]); + break; + case "replace": + this.replace_handler(pathArray[0], pathArray[1], operation.value); + break; + default: + break; + } + } + } + + add_handler(values, callback) { + if (this.hasOwnProperty('add_')) { + this.add_(values, callback); + } else { + this.add(values, callback); + } + } + + remove_handler(id) { + if (this.hasOwnProperty('remove_')) { + this.remove_(id); + } else { + this.remove(id); + } + } + + replace_handler(id, valueName, newValue) { + let item = this.get('id', id); + if (this.hasOwnProperty('add_')) + item.values({valueName: operation.value}); + } +} + + +RessourceList.options = { + // default RessourceList options + default: {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}, +}; + + +export { RessourceList, }; From 1003c4494d7fbb8e1e7b22af15c3edb8cba01ec4 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Mon, 7 Dec 2020 16:10:40 +0100 Subject: [PATCH 30/52] Progress on list rework --- web/app/models.py | 23 +- web/app/static/js/nopaque.js | 24 +- web/app/static/js/nopaque.lists.js | 506 +++++------------------ web/app/static/js/nopaque.lists.js.bak | 420 +++++++++++++++++++ web/app/static/js/nopaque.lists.new.js | 131 ++++-- web/app/templates/main/dashboard.html.j2 | 8 +- 6 files changed, 659 insertions(+), 453 deletions(-) create mode 100644 web/app/static/js/nopaque.lists.js.bak diff --git a/web/app/models.py b/web/app/models.py index f22042d5..6b7314ae 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -283,7 +283,7 @@ class JobInput(db.Model): @property def download_url(self): - return url_for('job.download_job_input', job_id=self.job_id, + return url_for('jobs.download_job_input', job_id=self.job_id, job_input_id=self.id) @property @@ -323,7 +323,7 @@ class JobResult(db.Model): @property def download_url(self): - return url_for('job.download_job_result', job_id=self.job_id, + return url_for('jobs.download_job_result', job_id=self.job_id, 
job_result_id=self.id) @property @@ -384,8 +384,8 @@ class Job(db.Model): return os.path.join(self.creator.path, 'jobs', str(self.id)) @property - def path(self): - return url_for('job.job', job_id=self.id) + def url(self): + return url_for('jobs.job', job_id=self.id) def __repr__(self): ''' @@ -430,9 +430,9 @@ class Job(db.Model): 'description': self.description, 'end_date': (self.end_date.timestamp() if self.end_date else None), - 'service': {'args': self.service_args, - 'name': self.service, - 'version': self.service_version}, + 'service': self.service, + 'service_args': self.service_args, + 'service_version': self.service_version, 'status': self.status, 'title': self.title, 'inputs': {input.id: input.to_dict() for input in self.inputs}, @@ -529,6 +529,10 @@ class Corpus(db.Model): files = db.relationship('CorpusFile', backref='corpus', lazy='dynamic', cascade='save-update, merge, delete') + @property + def analysis_url(self): + return url_for('corpora.analyse_corpus', corpus_id=self.id) + @property def path(self): return os.path.join(self.creator.path, 'corpora', str(self.id)) @@ -538,7 +542,8 @@ class Corpus(db.Model): return url_for('corpora.corpus', corpus_id=self.id) def to_dict(self): - return {'url': self.url, + return {'analysis_url': self.analysis_url, + 'url': self.url, 'id': self.id, 'user_id': self.user_id, 'creation_date': self.creation_date.timestamp(), @@ -628,8 +633,10 @@ class QueryResult(db.Model): 'url': self.url, 'id': self.id, 'user_id': self.user_id, + 'corpus_title': self.query_metadata['corpus_name'], 'description': self.description, 'filename': self.filename, + 'query': self.query_metadata['query'], 'query_metadata': self.query_metadata, 'title': self.title} diff --git a/web/app/static/js/nopaque.js b/web/app/static/js/nopaque.js index b006ec25..5e3ad92a 100644 --- a/web/app/static/js/nopaque.js +++ b/web/app/static/js/nopaque.js @@ -27,13 +27,13 @@ nopaque.socket = io({transports: ['websocket']}); nopaque.socket.on("user_data_stream_init", function(msg) { nopaque.user = JSON.parse(msg); for (let subscriber of nopaque.corporaSubscribers) { - subscriber._init(nopaque.user.corpora); + subscriber.init(nopaque.user.corpora); } for (let subscriber of nopaque.jobsSubscribers) { - subscriber._init(nopaque.user.jobs); + subscriber.init(nopaque.user.jobs); } for (let subscriber of nopaque.queryResultsSubscribers) { - subscriber._init(nopaque.user.query_results); + subscriber.init(nopaque.user.query_results); } }); @@ -46,13 +46,13 @@ nopaque.socket.on("user_data_stream_update", function(msg) { jobs_patch = patch.filter(operation => operation.path.startsWith("/jobs")); query_results_patch = patch.filter(operation => operation.path.startsWith("/query_results")); for (let subscriber of nopaque.corporaSubscribers) { - subscriber._update(corpora_patch); + subscriber.update(corpora_patch); } for (let subscriber of nopaque.jobsSubscribers) { - subscriber._update(jobs_patch); + subscriber.update(jobs_patch); } for (let subscriber of nopaque.queryResultsSubscribers) { - subscriber._update(query_results_patch); + subscriber.update(query_results_patch); } if (["all", "end"].includes(nopaque.user.settings.job_status_site_notifications)) { for (operation of jobs_patch) { @@ -69,13 +69,13 @@ nopaque.socket.on("user_data_stream_update", function(msg) { nopaque.socket.on("foreign_user_data_stream_init", function(msg) { nopaque.foreignUser = JSON.parse(msg); for (let subscriber of nopaque.foreignCorporaSubscribers) { - subscriber._init(nopaque.foreignUser.corpora); + 
subscriber.init(nopaque.foreignUser.corpora); } for (let subscriber of nopaque.foreignJobsSubscribers) { - subscriber._init(nopaque.foreignUser.jobs); + subscriber.init(nopaque.foreignUser.jobs); } for (let subscriber of nopaque.foreignQueryResultsSubscribers) { - subscriber._init(nopaque.foreignUser.query_results); + subscriber.init(nopaque.foreignUser.query_results); } }); @@ -87,9 +87,9 @@ nopaque.socket.on("foreign_user_data_stream_update", function(msg) { corpora_patch = patch.filter(operation => operation.path.startsWith("/corpora")); jobs_patch = patch.filter(operation => operation.path.startsWith("/jobs")); query_results_patch = patch.filter(operation => operation.path.startsWith("/query_results")); - for (let subscriber of nopaque.foreignCorporaSubscribers) {subscriber._update(corpora_patch);} - for (let subscriber of nopaque.foreignJobsSubscribers) {subscriber._update(jobs_patch);} - for (let subscriber of nopaque.foreignQueryResultsSubscribers) {subscriber._update(query_results_patch);} + for (let subscriber of nopaque.foreignCorporaSubscribers) {subscriber.update(corpora_patch);} + for (let subscriber of nopaque.foreignJobsSubscribers) {subscriber.update(jobs_patch);} + for (let subscriber of nopaque.foreignQueryResultsSubscribers) {subscriber.update(query_results_patch);} }); nopaque.Forms = {}; diff --git a/web/app/static/js/nopaque.lists.js b/web/app/static/js/nopaque.lists.js index c907f5be..0ce961ed 100644 --- a/web/app/static/js/nopaque.lists.js +++ b/web/app/static/js/nopaque.lists.js @@ -1,420 +1,140 @@ -class RessourceList extends List { - constructor(idOrElement, subscriberList, type, options) { - if (!type || !["Corpus", "CorpusFile", "Job", "JobInput", "QueryResult", "User"].includes(type)) { - throw "Unknown Type!"; - } - super(idOrElement, {...RessourceList.options['common'], - ...RessourceList.options[type], - ...(options ? options : {})}); - if (subscriberList) {subscriberList.push(this);} - this.type = type; +class RessourceList { + constructor(idOrElement, options = {}) { + this.list = new List(idOrElement, {...RessourceList.options, ...options}); + } + + init(ressources) { + this.list.clear(); + this.add(Object.values(ressources)); + this.list.sort('id', {order: 'desc'}); } - _init(ressources) { - this.clear(); - this._add(Object.values(ressources)); - this.sort("id", {order: "desc"}); - } - - - _update(patch) { + update(patch) { let item, pathArray; for (let operation of patch) { - /* "/{ressourceName}/{ressourceId}/..." -> ["{ressourceId}", "..."] */ - pathArray = operation.path.split("/").slice(2); + /* + * '/{ressourceName}/{ressourceId}/{valueName}' -> ['{ressourceId}', {valueName}] + * Example: '/jobs/1/status' -> ['1', 'status'] + */ + let [id, valueName] = operation.path.split("/").slice(2); switch(operation.op) { - case "add": - if (pathArray.includes("results")) {break;} - this._add([operation.value]); + case 'add': + this.add(operation.value); break; - case "remove": - this.remove("id", pathArray[0]); + case 'remove': + this.remove(id); + break; + case 'replace': + this.replace(id, valueName, operation.value); break; - case "replace": - item = this.get("id", pathArray[0])[0]; - switch(pathArray[1]) { - case "status": - item.values({status: operation.value, - "analyse-link": ["analysing", "prepared", "start analysis"].includes(operation.value) ? 
`/corpora/${pathArray[0]}/analyse` : ""}); - break; - default: - break; - } default: break; } } } - _add(values, callback) { - this.add(values.map(x => RessourceList.dataMappers[this.type](x)), callback); - // Initialize modal and tooltipped elements in list - M.AutoInit(this.listContainer); + add(values) { + /* WORKAROUND: Set a callback function ('() => {return;}') to force List.js + perform the add method asynchronous. + * https://listjs.com/api/#add + */ + this.list.add(values, () => {return;}); + } + + remove(id) { + this.list.remove('id', id); + } + + replace(id, valueName, newValue) { + if (!this.list.valuesNames.includes(valueName)) {return;} + let item = this.list.get('id', id); + item.values({[valueName]: newValue}); } } +RessourceList.options = {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}; -RessourceList.dataMappers = { - // A data mapper describes entitys rendered per row. One key value pair holds - // the data to be rendered in the list.js table. Key has to correspond - // with the ValueNames defined below in RessourceList.options ValueNames. - // Links are declared with double ticks(") around them. The key for links - // have to correspond with the class of an element in the - // RessourceList.options item blueprint. +class CorpusList extends RessourceList { + constructor(listElementId, options = {}) { + let listElement = document.querySelector(`#${listElementId}`); + super(listElement, {...CorpusList.options, ...options}); + listElement.addEventListener('click', (event) => { + let actionButtonElement = event.target.closest('.action-button'); + if (actionButtonElement === null) {return;} + let corpusId = event.target.closest('tr').dataset.id; + let action = actionButtonElement.dataset.action; + switch (action) { + case 'analyse': + window.location.href = nopaque.user.corpora[corpusId].analysis_url; + } + }); + nopaque.corporaSubscribers.push(this); + } +} - /* ### Corpus mapper ### */ - Corpus: corpus => ({ - creation_date: corpus.creation_date, - description: corpus.description, - id: corpus.id, - link: `/corpora/${corpus.id}`, - status: corpus.status, - title: corpus.title, - title1: corpus.title, - "analyse-link": ["analysing", "prepared", "start analysis"].includes(corpus.status) ? 
`/corpora/${corpus.id}/analyse` : "", - "delete-link": `/corpora/${corpus.id}/delete`, - "delete-modal": `delete-corpus-${corpus.id}-modal`, - "delete-modal-trigger": `delete-corpus-${corpus.id}-modal`, - }), - /* ### CorpusFile mapper ### TODO: replace delete-modal with delete-onclick */ - CorpusFile: corpus_file => ({ - author: corpus_file.author, - filename: corpus_file.filename, - id: corpus_file.id, - link: `${corpus_file.corpus_id}/files/${corpus_file.id}`, - "publishing-year": corpus_file.publishing_year, - title: corpus_file.title, - title1: corpus_file.title, - "delete-link": `/corpora/${corpus_file.corpus_id}/files/${corpus_file.id}/delete`, - "delete-modal": `delete-corpus-file-${corpus_file.id}-modal`, - "delete-modal-trigger": `delete-corpus-file-${corpus_file.id}-modal`, - "download-link": `${corpus_file.corpus_id}/files/${corpus_file.id}/download`, - }), - /* ### Job mapper ### */ - Job: job => ({ - creation_date: job.creation_date, - description: job.description, - id: job.id, - link: `/jobs/${job.id}`, - service: job.service.name, - status: job.status, - title: job.title, - title1: job.title, - "delete-link": `/jobs/${job.id}/delete`, - "delete-modal": `delete-job-${job.id}-modal`, - "delete-modal-trigger": `delete-job-${job.id}-modal`, - }), - /* ### JobInput mapper ### */ - JobInput: job_input => ({ - filename: job_input.filename, - id: job_input.job_id, - "download-link": `${job_input.job_id}/inputs/${job_input.id}/download` - }), - /* ### QueryResult mapper ### */ - QueryResult: query_result => ({ - corpus_name: query_result.query_metadata.corpus_name, - description: query_result.description, - id: query_result.id, - link: `/corpora/result/${query_result.id}`, - query: query_result.query_metadata.query, - title: query_result.title, - "delete-link": `/corpora/result/${query_result.id}/delete`, - "delete-modal": `delete-query-result-${query_result.id}-modal`, - "delete-modal-trigger": `delete-query-result-${query_result.id}-modal`, - "inspect-link": `/corpora/result/${query_result.id}/inspect`, - }), - /* ### User mapper ### */ - User: user => ({ - confirmed: user.confirmed, - email: user.email, - id: user.id, - link: `users/${user.id}`, - role: user.role.name, - username: user.username, - username2: user.username, - "delete-link": `/admin/users/${user.id}/delete`, - "delete-modal": `delete-user-${user.id}-modal`, - "delete-modal-trigger": `delete-user-${user.id}-modal`, - }), + +CorpusList.options = { + item: ` + book +
                                        + + + delete + edit + search + + `, + valueNames: [{data: ['id']}, {name: "status", attr: "data-status"}, 'description', 'title'] }; -RessourceList.options = { - // common list.js options for 5 rows per page etc. - common: {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}, - // extended list.js options for 10 rows per page etc. - extended: { - page: 10, - pagination: [ - { - name: "paginationTop", - paginationClass: "paginationTop", - innerWindow: 8, - outerWindow: 1 - }, - { - paginationClass: "paginationBottom", - innerWindow: 8, - outerWindow: 1, - }, - ], - }, - /* Type specific List.js options. Usually only "item" and "valueNames" gets - * defined here but it is possible to define other List.js options. - * item: https://listjs.com/api/#item - * valueNames: https://listjs.com/api/#valueNames - */ - Corpus: { - item: ` - - - book - - - -
                                        - - - - - - - - - - `, - valueNames: [ - "creation_date", - "description", - "title", - "title1", - {data: ["id"]}, - {name: "analyse-link", attr: "href"}, - {name: "delete-link", attr: "href"}, - {name: "delete-modal-trigger", attr: "data-target"}, - {name: "delete-modal", attr: "id"}, - {name: "link", attr: "href"}, - {name: "status", attr: "data-status"}, - ] - }, - CorpusFile: { - item: ` - - - - - - - - - `, - valueNames: [ - "author", - "filename", - "publishing-year", - "title", - "title1", - {data: ["id"]}, - {name: "delete-link", attr: "href"}, - {name: "delete-modal-trigger", attr: "data-target"}, - {name: "delete-modal", attr: "id"}, - {name: "download-link", attr: "href"}, - {name: "link", attr: "href"}, - ], - }, - Job: { - item: ` - - - - - - -
                                        - - - - - - - - - - `, - valueNames: [ - "creation_date", - "description", - "title", - "title1", - {data: ["id"]}, - {name: "delete-link", attr: "href"}, - {name: "delete-modal-trigger", attr: "data-target"}, - {name: "delete-modal", attr: "id"}, - {name: "link", attr: "href"}, - {name: "service", attr: "data-service"}, - {name: "status", attr: "data-status"}, - ], - }, - JobInput: { - item : ` - - - - file_download - - - `, - valueNames: [ - "filename", - "id", - {name: "download-link", attr: "href"}, - ], - }, - QueryResult: { - item: ` - -
                                        -
                                        - - -
                                        - - - - - - - `, - valueNames: [ - "corpus_name", - "description", - "query", - "title", - "title2", - {data: ["id"]}, - {name: "delete-link", attr: "href"}, - {name: "delete-modal-trigger", attr: "data-target"}, - {name: "delete-modal", attr: "id"}, - {name: "inspect-link", attr: "href"}, - {name: "link", attr: "href"}, - ], - }, - User: { - item: ` - - - - - - - - - `, - valueNames: [ - "username", - "username2", - "email", - "role", - "id", - {name: "link", attr: "href"}, - {name: "delete-link", attr: "href"}, - {name: "delete-modal-trigger", attr: "data-target"}, - {name: "delete-modal", attr: "id"}, - ], - }, +class JobList extends RessourceList { + constructor(listElementId, options = {}) { + let listElement = document.querySelector(`#${listElementId}`); + super(listElement, {...JobList.options, ...options}); + nopaque.jobsSubscribers.push(this); + } +} + + +JobList.options = { + item: ` + +
                                        + + + delete + send + + `, + valueNames: [{data: ['id']}, {name: 'service', attr: 'data-service'}, {name: "status", attr: "data-status"}, 'description', 'title'] }; -export { RessourceList, }; + +class QueryResultList extends RessourceList { + constructor(listElementId, options = {}) { + let listElement = document.querySelector(`#${listElementId}`); + super(listElement, {...QueryResultList.options, ...options}); + nopaque.queryResultsSubscribers.push(this); + } +} + + +QueryResultList.options = { + item: ` +

                                        +
                                        + + delete + send + search + + `, + valueNames: [{data: ['id']}, 'corpus_title', 'description', 'query', 'title'] +}; + +export { CorpusList, JobList, QueryResultList }; diff --git a/web/app/static/js/nopaque.lists.js.bak b/web/app/static/js/nopaque.lists.js.bak new file mode 100644 index 00000000..c907f5be --- /dev/null +++ b/web/app/static/js/nopaque.lists.js.bak @@ -0,0 +1,420 @@ +class RessourceList extends List { + constructor(idOrElement, subscriberList, type, options) { + if (!type || !["Corpus", "CorpusFile", "Job", "JobInput", "QueryResult", "User"].includes(type)) { + throw "Unknown Type!"; + } + super(idOrElement, {...RessourceList.options['common'], + ...RessourceList.options[type], + ...(options ? options : {})}); + if (subscriberList) {subscriberList.push(this);} + this.type = type; + } + + + _init(ressources) { + this.clear(); + this._add(Object.values(ressources)); + this.sort("id", {order: "desc"}); + } + + + _update(patch) { + let item, pathArray; + + for (let operation of patch) { + /* "/{ressourceName}/{ressourceId}/..." -> ["{ressourceId}", "..."] */ + pathArray = operation.path.split("/").slice(2); + switch(operation.op) { + case "add": + if (pathArray.includes("results")) {break;} + this._add([operation.value]); + break; + case "remove": + this.remove("id", pathArray[0]); + break; + case "replace": + item = this.get("id", pathArray[0])[0]; + switch(pathArray[1]) { + case "status": + item.values({status: operation.value, + "analyse-link": ["analysing", "prepared", "start analysis"].includes(operation.value) ? `/corpora/${pathArray[0]}/analyse` : ""}); + break; + default: + break; + } + default: + break; + } + } + } + + _add(values, callback) { + this.add(values.map(x => RessourceList.dataMappers[this.type](x)), callback); + // Initialize modal and tooltipped elements in list + M.AutoInit(this.listContainer); + } +} + + + + +RessourceList.dataMappers = { + // A data mapper describes entitys rendered per row. One key value pair holds + // the data to be rendered in the list.js table. Key has to correspond + // with the ValueNames defined below in RessourceList.options ValueNames. + // Links are declared with double ticks(") around them. The key for links + // have to correspond with the class of an element in the + // RessourceList.options item blueprint. + + /* ### Corpus mapper ### */ + Corpus: corpus => ({ + creation_date: corpus.creation_date, + description: corpus.description, + id: corpus.id, + link: `/corpora/${corpus.id}`, + status: corpus.status, + title: corpus.title, + title1: corpus.title, + "analyse-link": ["analysing", "prepared", "start analysis"].includes(corpus.status) ? 
`/corpora/${corpus.id}/analyse` : "", + "delete-link": `/corpora/${corpus.id}/delete`, + "delete-modal": `delete-corpus-${corpus.id}-modal`, + "delete-modal-trigger": `delete-corpus-${corpus.id}-modal`, + }), + /* ### CorpusFile mapper ### TODO: replace delete-modal with delete-onclick */ + CorpusFile: corpus_file => ({ + author: corpus_file.author, + filename: corpus_file.filename, + id: corpus_file.id, + link: `${corpus_file.corpus_id}/files/${corpus_file.id}`, + "publishing-year": corpus_file.publishing_year, + title: corpus_file.title, + title1: corpus_file.title, + "delete-link": `/corpora/${corpus_file.corpus_id}/files/${corpus_file.id}/delete`, + "delete-modal": `delete-corpus-file-${corpus_file.id}-modal`, + "delete-modal-trigger": `delete-corpus-file-${corpus_file.id}-modal`, + "download-link": `${corpus_file.corpus_id}/files/${corpus_file.id}/download`, + }), + /* ### Job mapper ### */ + Job: job => ({ + creation_date: job.creation_date, + description: job.description, + id: job.id, + link: `/jobs/${job.id}`, + service: job.service.name, + status: job.status, + title: job.title, + title1: job.title, + "delete-link": `/jobs/${job.id}/delete`, + "delete-modal": `delete-job-${job.id}-modal`, + "delete-modal-trigger": `delete-job-${job.id}-modal`, + }), + /* ### JobInput mapper ### */ + JobInput: job_input => ({ + filename: job_input.filename, + id: job_input.job_id, + "download-link": `${job_input.job_id}/inputs/${job_input.id}/download` + }), + /* ### QueryResult mapper ### */ + QueryResult: query_result => ({ + corpus_name: query_result.query_metadata.corpus_name, + description: query_result.description, + id: query_result.id, + link: `/corpora/result/${query_result.id}`, + query: query_result.query_metadata.query, + title: query_result.title, + "delete-link": `/corpora/result/${query_result.id}/delete`, + "delete-modal": `delete-query-result-${query_result.id}-modal`, + "delete-modal-trigger": `delete-query-result-${query_result.id}-modal`, + "inspect-link": `/corpora/result/${query_result.id}/inspect`, + }), + /* ### User mapper ### */ + User: user => ({ + confirmed: user.confirmed, + email: user.email, + id: user.id, + link: `users/${user.id}`, + role: user.role.name, + username: user.username, + username2: user.username, + "delete-link": `/admin/users/${user.id}/delete`, + "delete-modal": `delete-user-${user.id}-modal`, + "delete-modal-trigger": `delete-user-${user.id}-modal`, + }), +}; + + +RessourceList.options = { + // common list.js options for 5 rows per page etc. + common: {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}, + // extended list.js options for 10 rows per page etc. + extended: { + page: 10, + pagination: [ + { + name: "paginationTop", + paginationClass: "paginationTop", + innerWindow: 8, + outerWindow: 1 + }, + { + paginationClass: "paginationBottom", + innerWindow: 8, + outerWindow: 1, + }, + ], + }, + /* Type specific List.js options. Usually only "item" and "valueNames" gets + * defined here but it is possible to define other List.js options. + * item: https://listjs.com/api/#item + * valueNames: https://listjs.com/api/#valueNames + */ + Corpus: { + item: ` + + + book + + + +
                                        + + + + + + + + + + `, + valueNames: [ + "creation_date", + "description", + "title", + "title1", + {data: ["id"]}, + {name: "analyse-link", attr: "href"}, + {name: "delete-link", attr: "href"}, + {name: "delete-modal-trigger", attr: "data-target"}, + {name: "delete-modal", attr: "id"}, + {name: "link", attr: "href"}, + {name: "status", attr: "data-status"}, + ] + }, + CorpusFile: { + item: ` + + + + + + + + + `, + valueNames: [ + "author", + "filename", + "publishing-year", + "title", + "title1", + {data: ["id"]}, + {name: "delete-link", attr: "href"}, + {name: "delete-modal-trigger", attr: "data-target"}, + {name: "delete-modal", attr: "id"}, + {name: "download-link", attr: "href"}, + {name: "link", attr: "href"}, + ], + }, + Job: { + item: ` + + + + + + +
                                        + + + + + + + + + + `, + valueNames: [ + "creation_date", + "description", + "title", + "title1", + {data: ["id"]}, + {name: "delete-link", attr: "href"}, + {name: "delete-modal-trigger", attr: "data-target"}, + {name: "delete-modal", attr: "id"}, + {name: "link", attr: "href"}, + {name: "service", attr: "data-service"}, + {name: "status", attr: "data-status"}, + ], + }, + JobInput: { + item : ` + + + + file_download + + + `, + valueNames: [ + "filename", + "id", + {name: "download-link", attr: "href"}, + ], + }, + QueryResult: { + item: ` + +
                                        +
                                        + + +
                                        + + + + + + + `, + valueNames: [ + "corpus_name", + "description", + "query", + "title", + "title2", + {data: ["id"]}, + {name: "delete-link", attr: "href"}, + {name: "delete-modal-trigger", attr: "data-target"}, + {name: "delete-modal", attr: "id"}, + {name: "inspect-link", attr: "href"}, + {name: "link", attr: "href"}, + ], + }, + User: { + item: ` + + + + + + + + + `, + valueNames: [ + "username", + "username2", + "email", + "role", + "id", + {name: "link", attr: "href"}, + {name: "delete-link", attr: "href"}, + {name: "delete-modal-trigger", attr: "data-target"}, + {name: "delete-modal", attr: "id"}, + ], + }, +}; + +export { RessourceList, }; diff --git a/web/app/static/js/nopaque.lists.new.js b/web/app/static/js/nopaque.lists.new.js index 564bae45..8e6ddfce 100644 --- a/web/app/static/js/nopaque.lists.new.js +++ b/web/app/static/js/nopaque.lists.new.js @@ -1,33 +1,33 @@ -class RessourceList extends List { - constructor(idOrElement, options) { - super(idOrElement, {...RessourceList.options['default'], ...(options ? options : {})}); +class RessourceList { + constructor(idOrElement, options = {}) { + this.list = new List(idOrElement, {...RessourceList.options, ...options}); } - _init(ressources) { - this.clear(); - this._add(Object.values(ressources)); - this.sort("id", {order: "desc"}); + init(ressources) { + this.list.clear(); + this.add(Object.values(ressources)); + this.list.sort('id', {order: 'desc'}); } - _update(patch) { + update(patch) { let item, pathArray; for (let operation of patch) { /* - * '/{ressourceName}/{ressourceId}/...' -> ['{ressourceId}', ...] + * '/{ressourceName}/{ressourceId}/{valueName}' -> ['{ressourceId}', {valueName}] * Example: '/jobs/1/status' -> ['1', 'status'] */ - pathArray = operation.path.split("/").slice(2); + let [id, valueName] = operation.path.split("/").slice(2); switch(operation.op) { - case "add": - this.add_handler([operation.value]); + case 'add': + this.add(operation.value); break; - case "remove": - this.remove_handler(pathArray[0]); + case 'remove': + this.remove(id); break; - case "replace": - this.replace_handler(pathArray[0], pathArray[1], operation.value); + case 'replace': + this.replace(id, valueName, operation.value); break; default: break; @@ -35,34 +35,93 @@ class RessourceList extends List { } } - add_handler(values, callback) { - if (this.hasOwnProperty('add_')) { - this.add_(values, callback); - } else { - this.add(values, callback); - } + add(values) { + /* WORKAROUND: Set a callback function ('() => {return;}') to force List.js + perform the add method asynchronous. 
+ * https://listjs.com/api/#add + */ + this.list.add(values, () => {return;}); } - remove_handler(id) { - if (this.hasOwnProperty('remove_')) { - this.remove_(id); - } else { - this.remove(id); - } + remove(id) { + this.list.remove('id', id); } - replace_handler(id, valueName, newValue) { - let item = this.get('id', id); - if (this.hasOwnProperty('add_')) - item.values({valueName: operation.value}); + replace(id, valueName, newValue) { + if (!this.list.valuesNames.includes(valueName)) {return;} + let item = this.list.get('id', id); + item.values({[valueName]: newValue}); } } -RessourceList.options = { - // default RessourceList options - default: {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}, +RessourceList.options = {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}; + + +class CorpusList extends RessourceList { + constructor(idOrElement, options = {}) { + super(idOrElement, {...CorpusList.options, ...options}); + nopaque.corporaSubscribers.push(this); + } +} + + +CorpusList.options = { + item: ` + book +
                                        + + + delete + edit + search + + `, + valueNames: [{data: ['id']}, 'description', 'status', 'title'] }; -export { RessourceList, }; +class JobList extends RessourceList { + constructor(idOrElement, options = {}) { + super(idOrElement, {...JobList.options, ...options}); + nopaque.jobsSubscribers.push(this); + } +} + + +JobList.options = { + item: ` + +
                                        + + + delete + send + + `, + valueNames: [{data: ['id']}, {name: 'service', attr: 'data-service'}, 'description', 'status', 'title'] +}; + + +class QueryResultList extends RessourceList { + constructor(idOrElement, options = {}) { + super(idOrElement, {...QueryResultList.options, ...options}); + nopaque.queryResultsSubscribers.push(this); + } +} + + +QueryResultList.options = { + item: ` +

                                        +
                                        + + delete + send + search + + `, + valueNames: [{data: ['id']}, 'corpus_title', 'description', 'query', 'title'] +}; + +export { CorpusList, JobList, QueryResultList }; diff --git a/web/app/templates/main/dashboard.html.j2 b/web/app/templates/main/dashboard.html.j2 index 56602f8c..e838f72a 100644 --- a/web/app/templates/main/dashboard.html.j2 +++ b/web/app/templates/main/dashboard.html.j2 @@ -176,9 +176,9 @@ {% block scripts %} {{ super() }} {% endblock scripts %} From 231ce24d23e772910516223f6cd609de09a5c15d Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Tue, 15 Dec 2020 14:38:01 +0100 Subject: [PATCH 31/52] Remove unused second backup --- web/app/static/js/nopaque.lists.new.js | 127 ------------------------- 1 file changed, 127 deletions(-) delete mode 100644 web/app/static/js/nopaque.lists.new.js diff --git a/web/app/static/js/nopaque.lists.new.js b/web/app/static/js/nopaque.lists.new.js deleted file mode 100644 index 8e6ddfce..00000000 --- a/web/app/static/js/nopaque.lists.new.js +++ /dev/null @@ -1,127 +0,0 @@ -class RessourceList { - constructor(idOrElement, options = {}) { - this.list = new List(idOrElement, {...RessourceList.options, ...options}); - } - - init(ressources) { - this.list.clear(); - this.add(Object.values(ressources)); - this.list.sort('id', {order: 'desc'}); - } - - - update(patch) { - let item, pathArray; - - for (let operation of patch) { - /* - * '/{ressourceName}/{ressourceId}/{valueName}' -> ['{ressourceId}', {valueName}] - * Example: '/jobs/1/status' -> ['1', 'status'] - */ - let [id, valueName] = operation.path.split("/").slice(2); - switch(operation.op) { - case 'add': - this.add(operation.value); - break; - case 'remove': - this.remove(id); - break; - case 'replace': - this.replace(id, valueName, operation.value); - break; - default: - break; - } - } - } - - add(values) { - /* WORKAROUND: Set a callback function ('() => {return;}') to force List.js - perform the add method asynchronous. - * https://listjs.com/api/#add - */ - this.list.add(values, () => {return;}); - } - - remove(id) { - this.list.remove('id', id); - } - - replace(id, valueName, newValue) { - if (!this.list.valuesNames.includes(valueName)) {return;} - let item = this.list.get('id', id); - item.values({[valueName]: newValue}); - } -} - - -RessourceList.options = {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}; - - -class CorpusList extends RessourceList { - constructor(idOrElement, options = {}) { - super(idOrElement, {...CorpusList.options, ...options}); - nopaque.corporaSubscribers.push(this); - } -} - - -CorpusList.options = { - item: ` - book -
                                        - - - delete - edit - search - - `, - valueNames: [{data: ['id']}, 'description', 'status', 'title'] -}; - - -class JobList extends RessourceList { - constructor(idOrElement, options = {}) { - super(idOrElement, {...JobList.options, ...options}); - nopaque.jobsSubscribers.push(this); - } -} - - -JobList.options = { - item: ` - -
                                        - - - delete - send - - `, - valueNames: [{data: ['id']}, {name: 'service', attr: 'data-service'}, 'description', 'status', 'title'] -}; - - -class QueryResultList extends RessourceList { - constructor(idOrElement, options = {}) { - super(idOrElement, {...QueryResultList.options, ...options}); - nopaque.queryResultsSubscribers.push(this); - } -} - - -QueryResultList.options = { - item: ` -

                                        -
                                        - - delete - send - search - - `, - valueNames: [{data: ['id']}, 'corpus_title', 'description', 'query', 'title'] -}; - -export { CorpusList, JobList, QueryResultList }; From 85385ef7e4b0034b31cf99c58c3bbdba5e0b6083 Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Tue, 15 Dec 2020 14:38:24 +0100 Subject: [PATCH 32/52] bump list.js version --- web/app/static/js/list.min.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/app/static/js/list.min.js b/web/app/static/js/list.min.js index 3cb27373..16247404 100644 --- a/web/app/static/js/list.min.js +++ b/web/app/static/js/list.min.js @@ -1,2 +1,2 @@ -/*! List.js v1.5.0 (http://listjs.com) by Jonny Strömberg (http://javve.com) */ -var List=function(t){function e(n){if(r[n])return r[n].exports;var i=r[n]={i:n,l:!1,exports:{}};return t[n].call(i.exports,i,i.exports,e),i.l=!0,i.exports}var r={};return e.m=t,e.c=r,e.i=function(t){return t},e.d=function(t,r,n){e.o(t,r)||Object.defineProperty(t,r,{configurable:!1,enumerable:!0,get:n})},e.n=function(t){var r=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(r,"a",r),r},e.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},e.p="",e(e.s=11)}([function(t,e,r){function n(t){if(!t||!t.nodeType)throw new Error("A DOM element reference is required");this.el=t,this.list=t.classList}var i=r(4),s=/\s+/;Object.prototype.toString;t.exports=function(t){return new n(t)},n.prototype.add=function(t){if(this.list)return this.list.add(t),this;var e=this.array(),r=i(e,t);return~r||e.push(t),this.el.className=e.join(" "),this},n.prototype.remove=function(t){if(this.list)return this.list.remove(t),this;var e=this.array(),r=i(e,t);return~r&&e.splice(r,1),this.el.className=e.join(" "),this},n.prototype.toggle=function(t,e){return this.list?("undefined"!=typeof e?e!==this.list.toggle(t,e)&&this.list.toggle(t):this.list.toggle(t),this):("undefined"!=typeof e?e?this.add(t):this.remove(t):this.has(t)?this.remove(t):this.add(t),this)},n.prototype.array=function(){var t=this.el.getAttribute("class")||"",e=t.replace(/^\s+|\s+$/g,""),r=e.split(s);return""===r[0]&&r.shift(),r},n.prototype.has=n.prototype.contains=function(t){return this.list?this.list.contains(t):!!~i(this.array(),t)}},function(t,e,r){var n=window.addEventListener?"addEventListener":"attachEvent",i=window.removeEventListener?"removeEventListener":"detachEvent",s="addEventListener"!==n?"on":"",a=r(5);e.bind=function(t,e,r,i){t=a(t);for(var o=0;o0?setTimeout(function(){e(r,n,i)},1):(t.update(),n(i))};return e}},function(t,e){t.exports=function(t){return t.handlers.filterStart=t.handlers.filterStart||[],t.handlers.filterComplete=t.handlers.filterComplete||[],function(e){if(t.trigger("filterStart"),t.i=1,t.reset.filter(),void 0===e)t.filtered=!1;else{t.filtered=!0;for(var r=t.items,n=0,i=r.length;nv.page,a=new m(t[i],void 0,n),v.items.push(a),r.push(a)}return v.update(),r}},this.show=function(t,e){return this.i=t,this.page=e,v.update(),v},this.remove=function(t,e,r){for(var n=0,i=0,s=v.items.length;i-1&&r.splice(n,1),v},this.trigger=function(t){for(var e=v.handlers[t].length;e--;)v.handlers[t][e](v);return v},this.reset={filter:function(){for(var t=v.items,e=t.length;e--;)t[e].filtered=!1;return v},search:function(){for(var t=v.items,e=t.length;e--;)t[e].found=!1;return v}},this.update=function(){var t=v.items,e=t.length;v.visibleItems=[],v.matchingItems=[],v.templater.clear();for(var r=0;r=v.i&&v.visibleItems.lengthe},innerWindow:function(t,e,r){return 
t>=e-r&&t<=e+r},dotted:function(t,e,r,n,i,s,a){return this.dottedLeft(t,e,r,n,i,s)||this.dottedRight(t,e,r,n,i,s,a)},dottedLeft:function(t,e,r,n,i,s){return e==r+1&&!this.innerWindow(e,i,s)&&!this.right(e,n)},dottedRight:function(t,e,r,n,i,s,a){return!t.items[a-1].values().dotted&&(e==n&&!this.innerWindow(e,i,s)&&!this.right(e,n))}},a=function(e,r,n){i.bind(e,"click",function(){t.show((r-1)*n+1,n)})};return function(r){var n=new s(t.listContainer.id,{listClass:r.paginationClass||"pagination",item:"
                                      • ",valueNames:["page","dotted"],searchClass:"pagination-search-that-is-not-supposed-to-exist",sortClass:"pagination-sort-that-is-not-supposed-to-exist"});t.on("updated",function(){e(n,r)}),e(n,r)}}},function(t,e,r){t.exports=function(t){var e=r(2)(t),n=function(t){for(var e=t.childNodes,r=[],n=0,i=e.length;n0?setTimeout(function(){s(e,r)},1):(t.update(),t.trigger("parseComplete"))};return t.handlers.parseComplete=t.handlers.parseComplete||[],function(){var e=n(t.list),r=t.valueNames;t.indexAsync?s(e,r):i(e,r)}}},function(t,e){t.exports=function(t){var e,r,n,i,s={resetList:function(){t.i=1,t.templater.clear(),i=void 0},setOptions:function(t){2==t.length&&t[1]instanceof Array?r=t[1]:2==t.length&&"function"==typeof t[1]?(r=void 0,i=t[1]):3==t.length?(r=t[1],i=t[2]):r=void 0},setColumns:function(){0!==t.items.length&&void 0===r&&(r=void 0===t.searchColumns?s.toArray(t.items[0].values()):t.searchColumns)},setSearchString:function(e){e=t.utils.toString(e).toLowerCase(),e=e.replace(/[-[\]{}()*+?.,\\^$|#]/g,"\\$&"),n=e},toArray:function(t){var e=[];for(var r in t)e.push(r);return e}},a={list:function(){for(var e=0,r=t.items.length;e-1))},reset:function(){t.reset.search(),t.searched=!1}},o=function(e){return t.trigger("searchStart"),s.resetList(),s.setSearchString(e),s.setOptions(arguments),s.setColumns(),""===n?a.reset():(t.searched=!0,i?i(n,r):a.list()),t.update(),t.trigger("searchComplete"),t.visibleItems};return t.handlers.searchStart=t.handlers.searchStart||[],t.handlers.searchComplete=t.handlers.searchComplete||[],t.utils.events.bind(t.utils.getByClass(t.listContainer,t.searchClass),"keyup",function(e){var r=e.target||e.srcElement,n=""===r.value&&!t.searched;n||o(r.value)}),t.utils.events.bind(t.utils.getByClass(t.listContainer,t.searchClass),"input",function(t){var e=t.target||t.srcElement;""===e.value&&o("")}),o}},function(t,e){t.exports=function(t){var e={els:void 0,clear:function(){for(var r=0,n=e.els.length;r]/g.exec(e)){var s=document.createElement("tbody");return s.innerHTML=e,s.firstChild}if(e.indexOf("<")!==-1){var a=document.createElement("div");return a.innerHTML=e,a.firstChild}var o=document.getElementById(t.item);if(o)return o}},this.get=function(e,n){r.create(e);for(var i={},s=0,a=n.length;s=1;)t.list.removeChild(t.list.firstChild)},n()};t.exports=function(t){return new r(t)}},function(t,e){t.exports=function(t,e){var r=t.getAttribute&&t.getAttribute(e)||null;if(!r)for(var n=t.attributes,i=n.length,s=0;s=48&&t<=57}function i(t,e){for(var r=(t+="").length,i=(e+="").length,s=0,l=0;s32)return!1;var o=i,l=function(){var t,r={};for(t=0;t=p;b--){var w=l[t.charAt(b-1)];if(0===g?y[b]=(y[b+1]<<1|1)&w:y[b]=(y[b+1]<<1|1)&w|((v[b+1]|v[b])<<1|1)|v[b+1],y[b]&f){var x=n(g,b-1);if(x<=u){if(u=x,c=b-1,!(c>o))break;p=Math.max(1,2*o-c)}}}if(n(g+1,o)>u)break;v=y}return!(c<0)}}]); \ No newline at end of file +var List;List=function(){var t={"./src/add-async.js":function(t){t.exports=function(t){return function e(r,n,s){var i=r.splice(0,50);s=(s=s||[]).concat(t.add(i)),r.length>0?setTimeout((function(){e(r,n,s)}),1):(t.update(),n(s))}}},"./src/filter.js":function(t){t.exports=function(t){return t.handlers.filterStart=t.handlers.filterStart||[],t.handlers.filterComplete=t.handlers.filterComplete||[],function(e){if(t.trigger("filterStart"),t.i=1,t.reset.filter(),void 0===e)t.filtered=!1;else{t.filtered=!0;for(var r=t.items,n=0,s=r.length;nv.page,a=new g(t[s],void 0,n),v.items.push(a),r.push(a)}return v.update(),r}m(t.slice(0),e)}},this.show=function(t,e){return 
this.i=t,this.page=e,v.update(),v},this.remove=function(t,e,r){for(var n=0,s=0,i=v.items.length;s-1&&r.splice(n,1),v},this.trigger=function(t){for(var e=v.handlers[t].length;e--;)v.handlers[t][e](v);return v},this.reset={filter:function(){for(var t=v.items,e=t.length;e--;)t[e].filtered=!1;return v},search:function(){for(var t=v.items,e=t.length;e--;)t[e].found=!1;return v}},this.update=function(){var t=v.items,e=t.length;v.visibleItems=[],v.matchingItems=[],v.templater.clear();for(var r=0;r=v.i&&v.visibleItems.lengthe},innerWindow:function(t,e,r){return t>=e-r&&t<=e+r},dotted:function(t,e,r,n,s,i,a){return this.dottedLeft(t,e,r,n,s,i)||this.dottedRight(t,e,r,n,s,i,a)},dottedLeft:function(t,e,r,n,s,i){return e==r+1&&!this.innerWindow(e,s,i)&&!this.right(e,n)},dottedRight:function(t,e,r,n,s,i,a){return!t.items[a-1].values().dotted&&(e==n&&!this.innerWindow(e,s,i)&&!this.right(e,n))}};return function(e){var n=new i(t.listContainer.id,{listClass:e.paginationClass||"pagination",item:e.item||"
                                      • ",valueNames:["page","dotted"],searchClass:"pagination-search-that-is-not-supposed-to-exist",sortClass:"pagination-sort-that-is-not-supposed-to-exist"});s.bind(n.listContainer,"click",(function(e){var r=e.target||e.srcElement,n=t.utils.getAttribute(r,"data-page"),s=t.utils.getAttribute(r,"data-i");s&&t.show((s-1)*n+1,n)})),t.on("updated",(function(){r(n,e)})),r(n,e)}}},"./src/parse.js":function(t,e,r){t.exports=function(t){var e=r("./src/item.js")(t),n=function(r,n){for(var s=0,i=r.length;s0?setTimeout((function(){e(r,s)}),1):(t.update(),t.trigger("parseComplete"))};return t.handlers.parseComplete=t.handlers.parseComplete||[],function(){var e=function(t){for(var e=t.childNodes,r=[],n=0,s=e.length;n]/g.exec(t)){var e=document.createElement("tbody");return e.innerHTML=t,e.firstElementChild}if(-1!==t.indexOf("<")){var r=document.createElement("div");return r.innerHTML=t,r.firstElementChild}}},a=function(e,r,n){var s=void 0,i=function(e){for(var r=0,n=t.valueNames.length;r=1;)t.list.removeChild(t.list.firstChild)},function(){var r;if("function"!=typeof t.item){if(!(r="string"==typeof t.item?-1===t.item.indexOf("<")?document.getElementById(t.item):i(t.item):s()))throw new Error("The list needs to have at least one item on init otherwise you'll have to add a template.");r=n(r,t.valueNames),e=function(){return r.cloneNode(!0)}}else e=function(e){var r=t.item(e);return i(r)}}()};t.exports=function(t){return new e(t)}},"./src/utils/classes.js":function(t,e,r){var n=r("./src/utils/index-of.js"),s=/\s+/;Object.prototype.toString;function i(t){if(!t||!t.nodeType)throw new Error("A DOM element reference is required");this.el=t,this.list=t.classList}t.exports=function(t){return new i(t)},i.prototype.add=function(t){if(this.list)return this.list.add(t),this;var e=this.array();return~n(e,t)||e.push(t),this.el.className=e.join(" "),this},i.prototype.remove=function(t){if(this.list)return this.list.remove(t),this;var e=this.array(),r=n(e,t);return~r&&e.splice(r,1),this.el.className=e.join(" "),this},i.prototype.toggle=function(t,e){return this.list?(void 0!==e?e!==this.list.toggle(t,e)&&this.list.toggle(t):this.list.toggle(t),this):(void 0!==e?e?this.add(t):this.remove(t):this.has(t)?this.remove(t):this.add(t),this)},i.prototype.array=function(){var t=(this.el.getAttribute("class")||"").replace(/^\s+|\s+$/g,"").split(s);return""===t[0]&&t.shift(),t},i.prototype.has=i.prototype.contains=function(t){return this.list?this.list.contains(t):!!~n(this.array(),t)}},"./src/utils/events.js":function(t,e,r){var n=window.addEventListener?"addEventListener":"attachEvent",s=window.removeEventListener?"removeEventListener":"detachEvent",i="addEventListener"!==n?"on":"",a=r("./src/utils/to-array.js");e.bind=function(t,e,r,s){for(var o=0,l=(t=a(t)).length;o32)return!1;var a=n,o=function(){var t,r={};for(t=0;t=p;b--){var j=o[t.charAt(b-1)];if(C[b]=0===m?(C[b+1]<<1|1)&j:(C[b+1]<<1|1)&j|(v[b+1]|v[b])<<1|1|v[b+1],C[b]&d){var x=l(m,b-1);if(x<=u){if(u=x,!((c=b-1)>a))break;p=Math.max(1,2*a-c)}}}if(l(m+1,a)>u)break;v=C}return!(c<0)}},"./src/utils/get-attribute.js":function(t){t.exports=function(t,e){var r=t.getAttribute&&t.getAttribute(e)||null;if(!r)for(var n=t.attributes,s=n.length,i=0;i=48&&t<=57}function i(t,e){for(var i=(t+="").length,a=(e+="").length,o=0,l=0;o=i&&l=a?-1:l>=a&&o=i?1:i-a}i.caseInsensitive=i.i=function(t,e){return i((""+t).toLowerCase(),(""+e).toLowerCase())},Object.defineProperties(i,{alphabet:{get:function(){return e},set:function(t){r=[];var s=0;if(e=t)for(;s Date: 
Tue, 15 Dec 2020 14:38:52 +0100 Subject: [PATCH 33/52] Rework list handling --- web/app/events.py | 40 ++-- web/app/static/css/nopaque.css | 4 + web/app/static/js/nopaque.js | 239 ++++++++++++---------- web/app/static/js/nopaque.lists.js | 248 +++++++++++++++++------ web/app/templates/admin/user.html.j2 | 21 +- web/app/templates/main/dashboard.html.j2 | 13 +- web/app/templates/nopaque.html.j2 | 21 +- 7 files changed, 354 insertions(+), 232 deletions(-) diff --git a/web/app/events.py b/web/app/events.py index df716d81..a0f76b3b 100644 --- a/web/app/events.py +++ b/web/app/events.py @@ -33,38 +33,24 @@ def disconnect(): connected_sessions.remove(request.sid) -@socketio.on('user_data_stream_init') +@socketio.on('start_user_session') @socketio_login_required -def user_data_stream_init(): - socketio.start_background_task(user_data_stream, +def start_user_session(user_id): + if not (current_user.id == user_id or current_user.is_administrator): + return + socketio.start_background_task(user_session, current_app._get_current_object(), - current_user.id, request.sid) + user_id, request.sid) -@socketio.on('foreign_user_data_stream_init') -@socketio_login_required -@socketio_admin_required -def foreign_user_data_stream_init(user_id): - socketio.start_background_task(user_data_stream, - current_app._get_current_object(), - user_id, request.sid, foreign=True) - - -def user_data_stream(app, user_id, session_id, foreign=False): +def user_session(app, user_id, session_id): ''' - ' Sends initial corpus and job lists to the client. Afterwards it checks - ' every 3 seconds if changes to the initial values appeared. If changes are - ' detected, a RFC 6902 compliant JSON patch gets send. - ' - ' NOTE: The initial values are send as a init events. - ' The JSON patches are send as update events. + ' Sends initial user data to the client. Afterwards it checks every 3s if + ' changes to the initial values appeared. If changes are detected, a + ' RFC 6902 compliant JSON patch gets send. ''' - if foreign: - init_event = 'foreign_user_data_stream_init' - update_event = 'foreign_user_data_stream_update' - else: - init_event = 'user_data_stream_init' - update_event = 'user_data_stream_update' + init_event = 'user_{}_init'.format(user_id) + patch_event = 'user_{}_patch'.format(user_id) with app.app_context(): # Gather current values from database. user = User.query.get(user_id) @@ -80,7 +66,7 @@ def user_data_stream(app, user_id, session_id, foreign=False): new_user_dict) # In case there are patches, send them to the client. if user_patch: - socketio.emit(update_event, user_patch.to_string(), + socketio.emit(patch_event, user_patch.to_string(), room=session_id) # Set new values as references for the next iteration. 
user_dict = new_user_dict diff --git a/web/app/static/css/nopaque.css b/web/app/static/css/nopaque.css index 3ad3a914..597701aa 100644 --- a/web/app/static/css/nopaque.css +++ b/web/app/static/css/nopaque.css @@ -8,6 +8,10 @@ main { margin-top: 48px; } +table.ressource-list tr { + cursor: pointer; +} + .parallax-container .parallax { z-index: auto; } diff --git a/web/app/static/js/nopaque.js b/web/app/static/js/nopaque.js index 5e3ad92a..bd5e3f4e 100644 --- a/web/app/static/js/nopaque.js +++ b/web/app/static/js/nopaque.js @@ -1,96 +1,138 @@ +class AppClient { + constructor(currentUserId) { + this.socket = io({transports: ['websocket']}); + this.users = {}; + this.users.self = this.loadUser(currentUserId); + } + + loadUser(userId) { + let user = new User(); + this.users[userId] = user; + this.socket.on(`user_${userId}_init`, msg => user.init(JSON.parse(msg))); + this.socket.on(`user_${userId}_patch`, msg => user.patch(JSON.parse(msg))); + this.socket.emit('start_user_session', userId); + return user; + } +} + + +class User { + constructor() { + this.data = undefined; + this.eventListeners = { + corporaInit: [], + corporaPatch: [], + jobsInit: [], + jobsPatch: [], + queryResultsInit: [], + queryResultsPatch: [] + }; + } + + init(data) { + this.data = data; + + let listener; + for (listener of this.eventListeners.corporaInit) { + listener(this.data.corpora); + } + for (listener of this.eventListeners.jobsInit) { + listener(this.data.jobs); + } + for (listener of this.eventListeners.queryResultsInit) { + listener(this.data.query_results); + } + } + + patch(patch) { + this.data = jsonpatch.apply_patch(this.data, patch); + + let corporaPatch = patch.filter(operation => operation.path.startsWith("/corpora")); + let jobsPatch = patch.filter(operation => operation.path.startsWith("/jobs")); + let queryResultsPatch = patch.filter(operation => operation.path.startsWith("/query_results")); + + for (let listener of this.eventListeners.corporaPatch) { + if (corporaPatch.length > 0) {listener(corporaPatch);} + } + for (let listener of this.eventListeners.jobsPatch) { + if (jobsPatch.length > 0) {listener(jobsPatch);} + } + for (let listener of this.eventListeners.queryResultsPatch) { + if (queryResultsPatch.length > 0) {listener(queryResultsPatch);} + } + + for (let operation of jobsPatch) { + if (operation.op !== 'replace') {continue;} + // Matches the only path that should be handled here: /jobs/{jobId}/status + if (/^\/jobs\/(\d+)\/status$/.test(operation.path)) { + let [match, jobId] = operation.path.match(/^\/jobs\/(\d+)\/status$/); + if (this.data.settings.job_status_site_notifications === "end" && !["complete", "failed"].includes(operation.value)) {continue;} + nopaque.flash(`[${this.data.jobs[jobId].title}] New status: ${operation.value}`, "job"); + } + } + } + + addEventListener(type, listener) { + switch (type) { + case 'corporaInit': + this.eventListeners.corporaInit.push(listener); + if (this.data !== undefined) {listener(this.data.corpora);} + break; + case 'corporaPatch': + this.eventListeners.corporaPatch.push(listener); + break; + case 'jobsInit': + this.eventListeners.jobsInit.push(listener); + if (this.data !== undefined) {listener(this.data.jobs);} + break; + case 'jobsPatch': + this.eventListeners.jobsPatch.push(listener); + break; + case 'queryResultsInit': + this.eventListeners.queryResultsInit.push(listener); + if (this.data !== undefined) {listener(this.data.query_results);} + break; + case 'queryResultsPatch': + this.eventListeners.queryResultsPatch.push(listener); + break; + 
default: + console.error(`Unknown event type: ${type}`); + } + } +} + + /* * The nopaque object is used as a namespace for nopaque specific functions and * variables. */ var nopaque = {}; -// User data -nopaque.user = {}; -nopaque.user.settings = {}; -nopaque.user.settings.darkMode = undefined; -nopaque.corporaSubscribers = []; -nopaque.jobsSubscribers = []; -nopaque.queryResultsSubscribers = []; +nopaque.flash = function(message, category) { + let toast; + let toastActionElement; -// Foreign user (user inspected with admin credentials) data -nopaque.foreignUser = {}; -nopaque.foreignUser.isAuthenticated = undefined; -nopaque.foreignUser.settings = {}; -nopaque.foreignUser.settings.darkMode = undefined; -nopaque.foreignCorporaSubscribers = []; -nopaque.foreignJobsSubscribers = []; -nopaque.foreignQueryResultsSubscribers = []; + switch (category) { + case "corpus": + message = `book${message}`; + break; + case "error": + message = `error${message}`; + break; + case "job": + message = `work${message}`; + break; + default: + message = `notifications${message}`; + } -// nopaque functions -nopaque.socket = io({transports: ['websocket']}); -// Add event handlers -nopaque.socket.on("user_data_stream_init", function(msg) { - nopaque.user = JSON.parse(msg); - for (let subscriber of nopaque.corporaSubscribers) { - subscriber.init(nopaque.user.corpora); - } - for (let subscriber of nopaque.jobsSubscribers) { - subscriber.init(nopaque.user.jobs); - } - for (let subscriber of nopaque.queryResultsSubscribers) { - subscriber.init(nopaque.user.query_results); - } -}); - -nopaque.socket.on("user_data_stream_update", function(msg) { - var patch; - - patch = JSON.parse(msg); - nopaque.user = jsonpatch.apply_patch(nopaque.user, patch); - corpora_patch = patch.filter(operation => operation.path.startsWith("/corpora")); - jobs_patch = patch.filter(operation => operation.path.startsWith("/jobs")); - query_results_patch = patch.filter(operation => operation.path.startsWith("/query_results")); - for (let subscriber of nopaque.corporaSubscribers) { - subscriber.update(corpora_patch); - } - for (let subscriber of nopaque.jobsSubscribers) { - subscriber.update(jobs_patch); - } - for (let subscriber of nopaque.queryResultsSubscribers) { - subscriber.update(query_results_patch); - } - if (["all", "end"].includes(nopaque.user.settings.job_status_site_notifications)) { - for (operation of jobs_patch) { - /* "/jobs/{jobId}/..." -> ["{jobId}", ...] 
*/ - pathArray = operation.path.split("/").slice(2); - if (operation.op === "replace" && pathArray[1] === "status") { - if (nopaque.user.settings.job_status_site_notifications === "end" && !["complete", "failed"].includes(operation.value)) {continue;} - nopaque.flash(`[${nopaque.user.jobs[pathArray[0]].title}] New status: ${operation.value}`, "job"); - } - } - } -}); - -nopaque.socket.on("foreign_user_data_stream_init", function(msg) { - nopaque.foreignUser = JSON.parse(msg); - for (let subscriber of nopaque.foreignCorporaSubscribers) { - subscriber.init(nopaque.foreignUser.corpora); - } - for (let subscriber of nopaque.foreignJobsSubscribers) { - subscriber.init(nopaque.foreignUser.jobs); - } - for (let subscriber of nopaque.foreignQueryResultsSubscribers) { - subscriber.init(nopaque.foreignUser.query_results); - } -}); - -nopaque.socket.on("foreign_user_data_stream_update", function(msg) { - var patch; - - patch = JSON.parse(msg); - nopaque.foreignUser = jsonpatch.apply_patch(nopaque.foreignUser, patch); - corpora_patch = patch.filter(operation => operation.path.startsWith("/corpora")); - jobs_patch = patch.filter(operation => operation.path.startsWith("/jobs")); - query_results_patch = patch.filter(operation => operation.path.startsWith("/query_results")); - for (let subscriber of nopaque.foreignCorporaSubscribers) {subscriber.update(corpora_patch);} - for (let subscriber of nopaque.foreignJobsSubscribers) {subscriber.update(jobs_patch);} - for (let subscriber of nopaque.foreignQueryResultsSubscribers) {subscriber.update(query_results_patch);} -}); + toast = M.toast({html: `${message} + `}); + toastActionElement = toast.el.querySelector('.toast-action[data-action="close"]'); + toastActionElement.addEventListener('click', () => {toast.dismiss();}); +}; nopaque.Forms = {}; nopaque.Forms.init = function() { @@ -163,30 +205,3 @@ nopaque.Forms.init = function() { } } } - - -nopaque.flash = function(message, category) { - let toast; - let toastActionElement; - - switch (category) { - case "corpus": - message = `book${message}`; - break; - case "error": - message = `error${message}`; - break; - case "job": - message = `work${message}`; - break; - default: - message = `notifications${message}`; - } - - toast = M.toast({html: `${message} - `}); - toastActionElement = toast.el.querySelector('.toast-action[data-action="close"]'); - toastActionElement.addEventListener('click', () => {toast.dismiss();}); -} diff --git a/web/app/static/js/nopaque.lists.js b/web/app/static/js/nopaque.lists.js index 0ce961ed..36ea8b48 100644 --- a/web/app/static/js/nopaque.lists.js +++ b/web/app/static/js/nopaque.lists.js @@ -1,6 +1,33 @@ class RessourceList { - constructor(idOrElement, options = {}) { - this.list = new List(idOrElement, {...RessourceList.options, ...options}); + /* A wrapper class for the list.js list. + * This class is not meant to be used directly, instead it should be used as + * a template for concrete ressource list implementations. 
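+   *
+   * A minimal usage sketch (illustrative only; the element id and the page
+   * script around it are assumptions, not part of this change): a concrete
+   * subclass is constructed with its list element and is then fed through
+   * init()/patch() by the User events it subscribes to, e.g.
+   *
+   *   let corpusListElement = document.querySelector('#corpus-list');
+   *   let corpusList = new CorpusList(corpusListElement);
+   *   // No further calls are needed: init() renders the user's corpora once
+   *   // the initial snapshot arrives, patch() keeps the rendered list in sync.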
+ */ + constructor(listElement, options = {}) { + if (listElement.dataset.userId) { + if (listElement.dataset.userId in nopaque.appClient.users) { + this.user = nopaque.appClient.users[listElement.dataset.userId]; + } else { + console.error(`User not found: ${listElement.dataset.userId}`); + return; + } + } else { + this.user = nopaque.appClient.users.self; + } + this.list = new List(listElement, {...RessourceList.options, ...options}); + this.valueNames = ['id']; + for (let element of this.list.valueNames) { + switch (typeof element) { + case 'object': + if (element.hasOwnProperty('name')) {this.valueNames.push(element.name);} + break; + case 'string': + this.valueNames.push(element); + break; + default: + console.error(`Unknown value name definition: ${element}`); + } + } } init(ressources) { @@ -9,38 +36,27 @@ class RessourceList { this.list.sort('id', {order: 'desc'}); } - - update(patch) { - let item, pathArray; - - for (let operation of patch) { - /* - * '/{ressourceName}/{ressourceId}/{valueName}' -> ['{ressourceId}', {valueName}] - * Example: '/jobs/1/status' -> ['1', 'status'] - */ - let [id, valueName] = operation.path.split("/").slice(2); - switch(operation.op) { - case 'add': - this.add(operation.value); - break; - case 'remove': - this.remove(id); - break; - case 'replace': - this.replace(id, valueName, operation.value); - break; - default: - break; - } - } + patch(patch) { + /* + * It's not possible to generalize a patch Handler for all type of + * ressources. So this method is meant to be an interface. + */ + console.error('patch method not implemented!'); } add(values) { - /* WORKAROUND: Set a callback function ('() => {return;}') to force List.js - perform the add method asynchronous. - * https://listjs.com/api/#add - */ - this.list.add(values, () => {return;}); + let ressources = Array.isArray(values) ? values : [values]; + // Discard ressource values, that are not defined to be used in the list. 
+ ressources = ressources.map(ressource => { + let cleanedRessource = {}; + for (let [valueName, value] of Object.entries(ressource)) { + if (this.valueNames.includes(valueName)) {cleanedRessource[valueName] = value;} + } + return cleanedRessource; + }); + // Set a callback function ('() => {return;}') to force List.js perform the + // add method asynchronous: https://listjs.com/api/#add + this.list.add(ressources, () => {return;}); } remove(id) { @@ -48,35 +64,84 @@ class RessourceList { } replace(id, valueName, newValue) { - if (!this.list.valuesNames.includes(valueName)) {return;} - let item = this.list.get('id', id); - item.values({[valueName]: newValue}); + if (this.valueNames.includes(valueName)) { + let item = this.list.get('id', id)[0]; + item.values({[valueName]: newValue}); + } } } - - RessourceList.options = {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}; class CorpusList extends RessourceList { - constructor(listElementId, options = {}) { - let listElement = document.querySelector(`#${listElementId}`); + constructor(listElement, options = {}) { super(listElement, {...CorpusList.options, ...options}); - listElement.addEventListener('click', (event) => { - let actionButtonElement = event.target.closest('.action-button'); - if (actionButtonElement === null) {return;} - let corpusId = event.target.closest('tr').dataset.id; - let action = actionButtonElement.dataset.action; - switch (action) { - case 'analyse': - window.location.href = nopaque.user.corpora[corpusId].analysis_url; + this.user.addEventListener('corporaInit', corpora => this.init(corpora)); + this.user.addEventListener('corporaPatch', patch => this.patch(patch)); + listElement.addEventListener('click', (event) => {this.onclick(event)}); + } + + onclick(event) { + let corpusId = event.target.closest('tr').dataset.id; + let actionButtonElement = event.target.closest('.action-button'); + let action = actionButtonElement === null ? 
'view' : actionButtonElement.dataset.action; + switch (action) { + case 'analyse': + window.location.href = nopaque.user.corpora[corpusId].analysis_url; + case 'delete': + let deleteModalHTML = ``; + let deleteModalParentElement = document.querySelector('main'); + deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML); + let deleteModalElement = deleteModalParentElement.lastChild; + let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}}); + deleteModal.open(); + break; + case 'view': + // TODO: handle unprepared corpora + window.location.href = nopaque.user.corpora[corpusId].url; + break; + default: + console.error(`Unknown action: ${action}`); + break; + } + } + + patch(patch) { + for (let operation of patch) { + switch(operation.op) { + case 'add': + // Matches the only paths that should be handled here: /corpora/{corpusId} + if (/^\/corpora\/(\d+)$/.test(operation.path)) {this.add(operation.value);} + break; + case 'remove': + // See case 'add' ;) + if (/^\/corpora\/(\d+)$/.test(operation.path)) { + let [match, id] = operation.path.match(/^\/corpora\/(\d+)$/); + this.remove(corpusId); + } + break; + case 'replace': + // Matches the only paths that should be handled here: /corpora/{corpusId}/{status || description || title} + if (/^\/corpora\/(\d+)\/(status|description|title)$/.test(operation.path)) { + let [match, id, valueName] = operation.path.match(/^\/corpora\/(\d+)\/(status|description|title)$/); + this.replace(id, valueName, operation.value); + } + break; + default: + break; } - }); - nopaque.corporaSubscribers.push(this); + } } } - - CorpusList.options = { item: ` book @@ -84,23 +149,80 @@ CorpusList.options = { delete - edit search + send `, - valueNames: [{data: ['id']}, {name: "status", attr: "data-status"}, 'description', 'title'] + valueNames: [{data: ['id']}, {name: 'status', attr: 'data-status'}, 'description', 'title'] }; class JobList extends RessourceList { - constructor(listElementId, options = {}) { - let listElement = document.querySelector(`#${listElementId}`); + constructor(listElement, options = {}) { super(listElement, {...JobList.options, ...options}); - nopaque.jobsSubscribers.push(this); + this.user.addEventListener('jobsInit', jobs => this.init(jobs)); + this.user.addEventListener('jobsPatch', patch => this.patch(patch)); + listElement.addEventListener('click', (event) => {this.onclick(event)}); + } + + onclick(event) { + let jobId = event.target.closest('tr').dataset.id; + let actionButtonElement = event.target.closest('.action-button'); + let action = actionButtonElement === null ? 
'view' : actionButtonElement.dataset.action; + switch (action) { + case 'delete': + let deleteModalHTML = ``; + let deleteModalParentElement = document.querySelector('main'); + deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML); + let deleteModalElement = deleteModalParentElement.lastChild; + let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}}); + deleteModal.open(); + break; + case 'view': + window.location.href = this.user.data.jobs[jobId].url; + break; + default: + console.error(`Unknown action: "${action}"`); + break; + } + } + + patch(patch) { + for (let operation of patch) { + switch(operation.op) { + case 'add': + // Matches the only paths that should be handled here: /jobs/{jobId} + if (/^\/jobs\/(\d+)$/.test(operation.path)) {this.add(operation.value);} + break; + case 'remove': + // See case add ;) + if (/^\/jobs\/(\d+)$/.test(operation.path)) { + let [match, id] = operation.path.match(/^\/jobs\/(\d+)$/); + this.remove(jobId); + } + break; + case 'replace': + // Matches the only paths that should be handled here: /jobs/{jobId}/{service || status || description || title} + if (/^\/jobs\/(\d+)\/(service|status|description|title)$/.test(operation.path)) { + let [match, id, valueName] = operation.path.match(/^\/jobs\/(\d+)\/(service|status|description|title)$/); + this.replace(id, valueName, operation.value); + } + break; + default: + break; + } + } } } - - JobList.options = { item: ` @@ -111,19 +233,17 @@ JobList.options = { send `, - valueNames: [{data: ['id']}, {name: 'service', attr: 'data-service'}, {name: "status", attr: "data-status"}, 'description', 'title'] + valueNames: [{data: ['id']}, {name: 'service', attr: 'data-service'}, {name: 'status', attr: 'data-status'}, 'description', 'title'] }; class QueryResultList extends RessourceList { - constructor(listElementId, options = {}) { - let listElement = document.querySelector(`#${listElementId}`); + constructor(listElement, options = {}) { super(listElement, {...QueryResultList.options, ...options}); - nopaque.queryResultsSubscribers.push(this); + this.user.addEventListener('queryResultsInit', queryResults => this.init(queryResults)); + this.user.addEventListener('queryResultsPatch', patch => this.init(patch)); } } - - QueryResultList.options = { item: `

                                        @@ -136,5 +256,3 @@ QueryResultList.options = { `, valueNames: [{data: ['id']}, 'corpus_title', 'description', 'query', 'title'] }; - -export { CorpusList, JobList, QueryResultList }; diff --git a/web/app/templates/admin/user.html.j2 b/web/app/templates/admin/user.html.j2 index 97e3137a..78351735 100644 --- a/web/app/templates/admin/user.html.j2 +++ b/web/app/templates/admin/user.html.j2 @@ -36,16 +36,16 @@ -
                                        +

                                        Corpora

                                        -
                                        +
                                        search
                                        - +
                                        @@ -64,16 +64,16 @@ -
                                        +

                                        Jobs

                                        -
                                        +
                                        search
                                        -
                                        +
                                        @@ -109,10 +109,9 @@ {% block scripts %} {{ super() }} - {% endblock scripts %} diff --git a/web/app/templates/main/dashboard.html.j2 b/web/app/templates/main/dashboard.html.j2 index e838f72a..8326654a 100644 --- a/web/app/templates/main/dashboard.html.j2 +++ b/web/app/templates/main/dashboard.html.j2 @@ -29,7 +29,7 @@ -
                                        Service
                                        +
                                        @@ -102,7 +102,7 @@ -
                                        +
                                        @@ -175,10 +175,9 @@ {% block scripts %} {{ super() }} - {% endblock scripts %} diff --git a/web/app/templates/nopaque.html.j2 b/web/app/templates/nopaque.html.j2 index a54fda33..804e8776 100644 --- a/web/app/templates/nopaque.html.j2 +++ b/web/app/templates/nopaque.html.j2 @@ -244,28 +244,29 @@ {% block scripts %} {{ super() }} +{% if current_user.setting_dark_mode %} + +{% endif %} + + {% endblock scripts %} From 9fb92c5a651e01d531f450fb96ea9ca8c9af84af Mon Sep 17 00:00:00 2001 From: Patrick Jentsch Date: Thu, 7 Jan 2021 14:51:44 +0100 Subject: [PATCH 34/52] Bump Socket.IO Version and update to new List and Display logic for live data --- web/app/models.py | 2 + web/app/static/js/nopaque/RessorceLists.js | 492 ++++++++++++++++++ .../static/js/nopaque/RessourceDisplays.js | 215 ++++++++ web/app/static/js/nopaque/index.js | 244 +++++++++ web/app/static/js/socket.io.min.js | 7 + web/app/static/js/socket.io.min.js.map | 1 + web/app/static/js/socket.io.slim.js | 9 - web/app/static/js/socket.io.slim.js.map | 1 - web/app/templates/corpora/corpus.html.j2 | 238 ++------- web/app/templates/jobs/job.html.j2 | 317 ++++------- web/app/templates/nopaque.html.j2 | 7 +- web/requirements.txt | 3 +- 12 files changed, 1113 insertions(+), 423 deletions(-) create mode 100644 web/app/static/js/nopaque/RessorceLists.js create mode 100644 web/app/static/js/nopaque/RessourceDisplays.js create mode 100644 web/app/static/js/nopaque/index.js create mode 100644 web/app/static/js/socket.io.min.js create mode 100644 web/app/static/js/socket.io.min.js.map delete mode 100644 web/app/static/js/socket.io.slim.js delete mode 100644 web/app/static/js/socket.io.slim.js.map diff --git a/web/app/models.py b/web/app/models.py index 6b7314ae..757e5f40 100644 --- a/web/app/models.py +++ b/web/app/models.py @@ -547,9 +547,11 @@ class Corpus(db.Model): 'id': self.id, 'user_id': self.user_id, 'creation_date': self.creation_date.timestamp(), + 'current_nr_of_tokens': self.current_nr_of_tokens, 'description': self.description, 'status': self.status, 'last_edited_date': self.last_edited_date.timestamp(), + 'max_nr_of_tokens': self.max_nr_of_tokens, 'title': self.title, 'files': {file.id: file.to_dict() for file in self.files}} diff --git a/web/app/static/js/nopaque/RessorceLists.js b/web/app/static/js/nopaque/RessorceLists.js new file mode 100644 index 00000000..47c0edb8 --- /dev/null +++ b/web/app/static/js/nopaque/RessorceLists.js @@ -0,0 +1,492 @@ +class RessourceList { + /* A wrapper class for the list.js list. + * This class is not meant to be used directly, instead it should be used as + * a base class for concrete ressource list implementations. 
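+   *
+   * Subclasses register themselves on the owning User and receive their data
+   * through eventHandler(eventType, payload). The two payload shapes, with
+   * illustrative ids and values that are not taken from a real session:
+   *
+   *   'init'  -> the full ressource mapping from the user snapshot, e.g.
+   *              {1: {id: 1, status: 'complete', title: 'Example', ...}}
+   *   'patch' -> the RFC 6902 operations scoped to this ressource type, e.g.
+   *              [{op: 'replace', path: '/jobs/2/status', value: 'running'}]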
+ */ + constructor(listElement, options = {}) { + if (listElement.dataset.userId) { + if (listElement.dataset.userId in nopaque.appClient.users) { + this.user = nopaque.appClient.users[listElement.dataset.userId]; + } else { + console.error(`User not found: ${listElement.dataset.userId}`); + return; + } + } else { + this.user = nopaque.appClient.users.self; + } + this.list = new List(listElement, {...RessourceList.options, ...options}); + this.list.list.innerHTML = ` + + `; + } + + eventHandler(eventType, payload) { + switch (eventType) { + case 'init': + this.init(payload); + break; + case 'patch': + this.patch(payload); + break; + default: + console.error(`Unknown event type: ${eventType}`); + break; + } + } + + init(ressources) { + this.list.clear(); + this.add(Object.values(ressources)); + this.list.sort('id', {order: 'desc'}); + } + + patch(patch) { + /* + * It's not possible to generalize a patch Handler for all type of + * ressources. So this method is meant to be an interface. + */ + console.error('patch method not implemented!'); + } + + add(values) { + let ressources = Array.isArray(values) ? values : [values]; + if (typeof this.preprocessRessource === 'function') { + ressources = ressources.map(ressource => this.preprocessRessource(ressource)); + } + // Set a callback function ('() => {return;}') to force List.js perform the + // add method asynchronous: https://listjs.com/api/#add + this.list.add(ressources, () => {return;}); + } + + remove(id) { + this.list.remove('id', id); + } + + replace(id, valueName, newValue) { + this.list.get('id', id)[0].values({[valueName]: newValue}); + } +} +RessourceList.options = {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]}; + + +class CorpusList extends RessourceList { + constructor(listElement, options = {}) { + super(listElement, {...CorpusList.options, ...options}); + this.corpora = undefined; + this.user.eventListeners.corpus.addEventListener((eventType, payload) => this.eventHandler(eventType, payload)); + listElement.addEventListener('click', event => this.onclick(event)); + } + + init(corpora) { + this.corpora = corpora; + super.init(corpora); + } + + onclick(event) { + let ressourceElement = event.target.closest('tr'); + if (ressourceElement === null) {return;} + let corpusId = ressourceElement.dataset.id; + let actionButtonElement = event.target.closest('.action-button'); + let action = (actionButtonElement === null) ? 
'view' : actionButtonElement.dataset.action; + switch (action) { + case 'analyse': + window.location.href = this.corpora[corpusId].analysis_url; + case 'delete': + let deleteModalHTML = ``; + let deleteModalParentElement = document.querySelector('main'); + deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML); + let deleteModalElement = deleteModalParentElement.lastChild; + let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}}); + deleteModal.open(); + break; + case 'view': + // TODO: handle unprepared corpora + window.location.href = this.corpora[corpusId].url; + break; + default: + console.error(`Unknown action: ${action}`); + break; + } + } + + patch(patch) { + let id, match, re, valueName; + for (let operation of patch) { + switch(operation.op) { + case 'add': + // Matches the only paths that should be handled here: /corpora/{corpusId} + re = /^\/corpora\/(\d+)$/; + if (re.test(operation.path)) {this.add(operation.value);} + break; + case 'remove': + // See case 'add' ;) + re = /^\/corpora\/(\d+)$/; + if (re.test(operation.path)) { + [match, id] = operation.path.match(re); + this.remove(id); + } + break; + case 'replace': + // Matches the only paths that should be handled here: /corpora/{corpusId}/{status || description || title} + re = /^\/corpora\/(\d+)\/(status|description|title)$/; + if (re.test(operation.path)) { + [match, id, valueName] = operation.path.match(re); + this.replace(id, valueName, operation.value); + } + break; + default: + break; + } + } + } + + preprocessRessource(corpus) { + return {id: corpus.id, + status: corpus.status, + description: corpus.description, + title: corpus.title}; + } +} +CorpusList.options = { + item: ` + + + + + `, + valueNames: [{data: ['id']}, {name: 'status', attr: 'data-status'}, 'description', 'title'] +}; + + +class CorpusFileList extends RessourceList { + constructor(listElement, options = {}) { + super(listElement, {...CorpusFileList.options, ...options}); + this.corpus = undefined; + this.user.eventListeners.corpus.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), listElement.dataset.corpusId); + listElement.addEventListener('click', event => this.onclick(event)); + } + + init(corpus) { + this.corpus = corpus; + super.init(this.corpus.files); + } + + onclick(event) { + let ressourceElement = event.target.closest('tr'); + if (ressourceElement === null) {return;} + let corpusFileId = ressourceElement.dataset.id; + let actionButtonElement = event.target.closest('.action-button'); + if (actionButtonElement === null) {return;} + let action = actionButtonElement === null ? 
'view' : actionButtonElement.dataset.action; + switch (action) { + case 'delete': + let deleteModalHTML = ``; + let deleteModalParentElement = document.querySelector('main'); + deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML); + let deleteModalElement = deleteModalParentElement.lastChild; + let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}}); + deleteModal.open(); + break; + case 'download': + window.location.href = this.corpus.files[corpusFileId].download_url; + break; + case 'view': + window.location.href = this.corpus.files[corpusFileId].url; + break; + default: + console.error(`Unknown action: "${action}"`); + break; + } + } + + patch(patch) { + let re; + for (let operation of patch) { + switch(operation.op) { + case 'add': + // Matches the only paths that should be handled here: /corpora/{this.corpus.id}/files/{corpusFileId} + re = new RegExp('^/corpora/' + this.corpus.id + '/files/(\\d+)$'); + if (re.test(operation.path)) {this.add(operation.value);} + break; + default: + break; + } + } + } + + preprocessRessource(corpusFile) { + return {id: corpusFile.id, author: corpusFile.author, filename: corpusFile.filename, 'publishing-year': corpusFile.publishing_year, title: corpusFile.title}; + } +} +CorpusFileList.options = { + item: ` + + + + + + `, + valueNames: [{data: ['id']}, 'author', 'filename', 'publishing-year', 'title'] +}; + + +class JobList extends RessourceList { + constructor(listElement, options = {}) { + super(listElement, {...JobList.options, ...options}); + this.user.eventListeners.job.addEventListener((eventType, payload) => this.eventHandler(eventType, payload)); + listElement.addEventListener('click', event => this.onclick(event)); + } + + onclick(event) { + let ressourceElement = event.target.closest('tr'); + if (ressourceElement === null) {return;} + let jobId = ressourceElement.dataset.id; + let actionButtonElement = event.target.closest('.action-button'); + let action = actionButtonElement === null ? 
'view' : actionButtonElement.dataset.action; + switch (action) { + case 'delete': + let deleteModalHTML = ``; + let deleteModalParentElement = document.querySelector('main'); + deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML); + let deleteModalElement = deleteModalParentElement.lastChild; + let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}}); + deleteModal.open(); + break; + case 'view': + window.location.href = this.user.data.jobs[jobId].url; + break; + default: + console.error(`Unknown action: "${action}"`); + break; + } + } + + patch(patch) { + let id, match, re, valueName; + for (let operation of patch) { + switch(operation.op) { + case 'add': + // Matches the only paths that should be handled here: /jobs/{jobId} + re = /^\/jobs\/(\d+)$/; + if (re.test(operation.path)) {this.add(operation.value);} + break; + case 'remove': + // See case add ;) + re = /^\/jobs\/(\d+)$/; + if (re.test(operation.path)) { + [match, id] = operation.path.match(re); + this.remove(id); + } + break; + case 'replace': + // Matches the only paths that should be handled here: /jobs/{jobId}/{service || status || description || title} + re = /^\/jobs\/(\d+)\/(status|description|title)$/; + if (re.test(operation.path)) { + [match, id, valueName] = operation.path.match(re); + this.replace(id, valueName, operation.value); + } + break; + default: + break; + } + } + } + + preprocessRessource(job) { + return {id: job.id, + service: job.service, + status: job.status, + description: job.description, + title: job.title}; + } +} +JobList.options = { + item: ` + + + + + `, + valueNames: [{data: ['id']}, {name: 'service', attr: 'data-service'}, {name: 'status', attr: 'data-status'}, 'description', 'title'] +}; + + +class JobInputList extends RessourceList { + constructor(listElement, options = {}) { + super(listElement, {...JobInputList.options, ...options}); + this.job = undefined; + this.user.eventListeners.job.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), listElement.dataset.jobId); + listElement.addEventListener('click', event => this.onclick(event)); + } + + init(job) { + this.job = job; + super.init(this.job.inputs); + } + + onclick(event) { + let ressourceElement = event.target.closest('tr'); + if (ressourceElement === null) {return;} + let jobInputId = ressourceElement.dataset.id; + let actionButtonElement = event.target.closest('.action-button'); + if (actionButtonElement === null) {return;} + let action = actionButtonElement.dataset.action; + switch (action) { + case 'download': + window.location.href = this.job.inputs[jobInputId].download_url; + break; + default: + console.error(`Unknown action: "${action}"`); + break; + } + } + + preprocessRessource(jobInput) { + return {id: jobInput.id, filename: jobInput.filename}; + } +} +JobInputList.options = { + item: ` + + + `, + valueNames: [{data: ['id']}, 'filename'] +}; + + +class JobResultList extends RessourceList { + constructor(listElement, options = {}) { + super(listElement, {...JobResultList.options, ...options}); + this.job = undefined; + this.user.eventListeners.job.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), listElement.dataset.jobId); + listElement.addEventListener('click', event => this.onclick(event)); + } + + init(job) { + this.job = job; + super.init(this.job.results); + } + + onclick(event) { + let ressourceElement = event.target.closest('tr'); + if (ressourceElement === null) {return;} + 
let jobResultId = ressourceElement.dataset.id; + let actionButtonElement = event.target.closest('.action-button'); + if (actionButtonElement === null) {return;} + let action = actionButtonElement.dataset.action; + switch (action) { + case 'download': + window.location.href = this.job.results[jobResultId].download_url; + break; + default: + console.error(`Unknown action: "${action}"`); + break; + } + } + + patch(patch) { + let re; + for (let operation of patch) { + switch(operation.op) { + case 'add': + // Matches the only paths that should be handled here: /jobs/{this.job.id}/results/{jobResultId} + re = new RegExp('^/jobs/' + this.job.id + '/results/(\\d+)$'); + if (re.test(operation.path)) {this.add(operation.value);} + break; + default: + break; + } + } + } + + preprocessRessource(jobResult) { + let description; + if (jobResult.filename.endsWith('.pdf.zip')) { + description = 'PDF files with text layer'; + } else if (jobResult.filename.endsWith('.txt.zip')) { + description = 'Raw text files'; + } else if (jobResult.filename.endsWith('.vrt.zip')) { + description = 'VRT compliant files including the NLP data'; + } else if (jobResult.filename.endsWith('.xml.zip')) { + description = 'TEI compliant files'; + } else if (jobResult.filename.endsWith('.poco.zip')) { + description = 'HOCR and image files for post correction (PoCo)'; + } else { + description = 'All result files created during this job'; + } + return {id: jobResult.id, description: description, filename: jobResult.filename}; + } +} +JobResultList.options = { + item: ` + + + + `, + valueNames: [{data: ['id']}, 'description', 'filename'] +}; + + +class QueryResultList extends RessourceList { + constructor(listElement, options = {}) { + super(listElement, {...QueryResultList.options, ...options}); + this.user.eventListeners.queryResult.addEventListener((eventType, payload) => this.eventHandler(eventType, payload)); + } +} +QueryResultList.options = { + item: ` + + + + `, + valueNames: [{data: ['id']}, 'corpus_title', 'description', 'query', 'title'] +}; diff --git a/web/app/static/js/nopaque/RessourceDisplays.js b/web/app/static/js/nopaque/RessourceDisplays.js new file mode 100644 index 00000000..67bb7caf --- /dev/null +++ b/web/app/static/js/nopaque/RessourceDisplays.js @@ -0,0 +1,215 @@ +class RessourceDisplay { + constructor(displayElement) { + if (displayElement.dataset.userId) { + if (displayElement.dataset.userId in nopaque.appClient.users) { + this.user = nopaque.appClient.users[displayElement.dataset.userId]; + } else { + console.error(`User not found: ${displayElement.dataset.userId}`); + return; + } + } else { + this.user = nopaque.appClient.users.self; + } + this.displayElement = displayElement; + } + + eventHandler(eventType, payload) { + switch (eventType) { + case 'init': + this.init(payload); + break; + case 'patch': + this.patch(payload); + break; + default: + console.log(`Unknown event type: ${eventType}`); + break; + } + } + + init() {console.error('init method not implemented!');} + + patch() {console.error('patch method not implemented!');} + + setElement(element, value) { + switch (element.tagName) { + case 'INPUT': + element.value = value; + M.updateTextFields(); + break; + default: + element.innerText = value; + break; + } + } +} + + +class CorpusDisplay extends RessourceDisplay { + constructor(displayElement) { + super(displayElement); + this.corpus = undefined; + this.user.eventListeners.corpus.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), displayElement.dataset.corpusId); + } 
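+
+  // Unlike the lists, a display is bound to one ressource: the listener above
+  // is registered with displayElement.dataset.corpusId, so init() receives
+  // only that corpus object and patch() only operations whose path starts
+  // with /corpora/{corpusId}. Illustrative example (id and value assumed):
+  //   [{op: 'replace', path: '/corpora/1/status', value: 'complete'}]
+  // is routed below to setStatus('complete').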
+ + init(corpus) { + this.corpus = corpus; + this.setCreationDate(this.corpus.creation_date); + this.setDescription(this.corpus.description); + this.setLastEditedDate(this.corpus.last_edited_date); + this.setStatus(this.corpus.status); + this.setTitle(this.corpus.title); + this.setTokenRatio(this.corpus.current_nr_of_tokens, this.corpus.max_nr_of_tokens); + } + + patch(patch) { + let re; + for (let operation of patch) { + switch(operation.op) { + case 'replace': + // Matches: /jobs/{this.job.id}/status + re = new RegExp('^/corpora/' + this.corpus.id + '/last_edited_date'); + if (re.test(operation.path)) {this.setLastEditedDate(operation.value); break;} + // Matches: /jobs/{this.job.id}/status + re = new RegExp('^/corpora/' + this.corpus.id + '/status$'); + if (re.test(operation.path)) {this.setStatus(operation.value); break;} + break; + default: + break; + } + } + } + + setTitle(title) { + for (let element of this.displayElement.querySelectorAll('.corpus-title')) {this.setElement(element, title);} + } + + setTokenRatio(currentNrOfTokens, maxNrOfTokens) { + let tokenRatio = `${currentNrOfTokens}/${maxNrOfTokens}`; + for (let element of this.displayElement.querySelectorAll('.corpus-token-ratio')) {this.setElement(element, tokenRatio);} + } + + setDescription(description) { + for (let element of this.displayElement.querySelectorAll('.corpus-description')) {this.setElement(element, description);} + } + + setStatus(status) { + for (let element of this.displayElement.querySelectorAll('.corpus-status')) {this.setElement(element, status);} + for (let element of this.displayElement.querySelectorAll('.status')) {element.dataset.status = status;} + for (let element of this.displayElement.querySelectorAll('.status-spinner')) { + if (['complete', 'failed', 'unprepared'].includes(status)) { + element.classList.add('hide'); + } else { + element.classList.remove('hide'); + } + } + for (let element of this.displayElement.querySelectorAll('.build-corpus-trigger')) { + if (['complete', 'failed'].includes(status)) { + element.classList.remove('hide'); + } else { + element.classList.add('hide'); + } + } + } + + setCreationDate(creationDateTimestamp) { + let creationDate = new Date(creationDateTimestamp * 1000).toLocaleString("en-US"); + for (let element of this.displayElement.querySelectorAll('.corpus-creation-date')) {this.setElement(element, creationDate);} + } + + setLastEditedDate(endDateTimestamp) { + let endDate = new Date(endDateTimestamp * 1000).toLocaleString("en-US"); + for (let element of this.displayElement.querySelectorAll('.corpus-end-date')) {this.setElement(element, endDate);} + } +} + + + +class JobDisplay extends RessourceDisplay { + constructor(displayElement) { + super(displayElement); + this.job = undefined; + this.user.eventListeners.job.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), displayElement.dataset.jobId); + } + + init(job) { + this.job = job; + this.setCreationDate(this.job.creation_date); + this.setEndDate(this.job.creation_date); + this.setDescription(this.job.description); + this.setService(this.job.service); + this.setServiceArgs(this.job.service_args); + this.setServiceVersion(this.job.service_version); + this.setStatus(this.job.status); + this.setTitle(this.job.title); + } + + patch(patch) { + let re; + for (let operation of patch) { + switch(operation.op) { + case 'replace': + // Matches: /jobs/{this.job.id}/status + re = new RegExp('^/jobs/' + this.job.id + '/end_date'); + if (re.test(operation.path)) {this.setEndDate(operation.value); 
break;} + // Matches: /jobs/{this.job.id}/status + re = new RegExp('^/jobs/' + this.job.id + '/status$'); + if (re.test(operation.path)) {this.setStatus(operation.value); break;} + break; + default: + break; + } + } + } + + setTitle(title) { + for (let element of this.displayElement.querySelectorAll('.job-title')) {this.setElement(element, title);} + } + + setDescription(description) { + for (let element of this.displayElement.querySelectorAll('.job-description')) {this.setElement(element, description);} + } + + setStatus(status) { + for (let element of this.displayElement.querySelectorAll('.job-status')) { + this.setElement(element, status); + } + for (let element of this.displayElement.querySelectorAll('.status')) {element.dataset.status = status;} + for (let element of this.displayElement.querySelectorAll('.status-spinner')) { + if (['complete', 'failed'].includes(status)) { + element.classList.add('hide'); + } else { + element.classList.remove('hide'); + } + } + for (let element of this.displayElement.querySelectorAll('.restart-job-trigger')) { + if (['complete', 'failed'].includes(status)) { + element.classList.remove('hide'); + } else { + element.classList.add('hide'); + } + } + } + + setCreationDate(creationDateTimestamp) { + let creationDate = new Date(creationDateTimestamp * 1000).toLocaleString("en-US"); + for (let element of this.displayElement.querySelectorAll('.job-creation-date')) {this.setElement(element, creationDate);} + } + + setEndDate(endDateTimestamp) { + let endDate = new Date(endDateTimestamp * 1000).toLocaleString("en-US"); + for (let element of this.displayElement.querySelectorAll('.job-end-date')) {this.setElement(element, endDate);} + } + + setService(service) { + for (let element of this.displayElement.querySelectorAll('.job-service')) {this.setElement(element, service);} + } + + setServiceArgs(serviceArgs) { + for (let element of this.displayElement.querySelectorAll('.job-service-args')) {this.setElement(element, serviceArgs);} + } + + setServiceVersion(serviceVersion) { + for (let element of this.displayElement.querySelectorAll('.job-service-version')) {this.setElement(element, serviceVersion);} + } +} diff --git a/web/app/static/js/nopaque/index.js b/web/app/static/js/nopaque/index.js new file mode 100644 index 00000000..1bf3561c --- /dev/null +++ b/web/app/static/js/nopaque/index.js @@ -0,0 +1,244 @@ +class AppClient { + constructor(currentUserId) { + this.socket = io({transports: ['websocket']}); + this.users = {}; + this.users.self = this.loadUser(currentUserId); + } + + loadUser(userId) { + if (userId in this.users) {return this.users[userId];} + let user = new User(); + this.users[userId] = user; + this.socket.on(`user_${userId}_init`, msg => user.init(JSON.parse(msg))); + this.socket.on(`user_${userId}_patch`, msg => user.patch(JSON.parse(msg))); + this.socket.emit('start_user_session', userId); + return user; + } +} + + +class User { + constructor() { + this.data = undefined; + this.eventListeners = { + corpus: { + addEventListener(listener, corpusId='*') { + if (corpusId in this) {this[corpusId].push(listener);} else {this[corpusId] = [listener];} + } + }, + job: { + addEventListener(listener, jobId='*') { + if (jobId in this) {this[jobId].push(listener);} else {this[jobId] = [listener];} + } + }, + queryResult: { + addEventListener(listener, queryResultId='*') { + if (queryResultId in this) {this[queryResultId].push(listener);} else {this[queryResultId] = [listener];} + } + } + }; + } + + init(data) { + this.data = data; + + if 
(Object.keys(this.data.corpora).length > 0) { + //for (listener of this.eventListeners.corporaInit) {listener(this.data.corpora);} + for (let [corpusId, eventListeners] of Object.entries(this.eventListeners.corpus)) { + if (corpusId === '*') { + for (let eventListener of eventListeners) {eventListener('init', this.data.corpora);} + } else { + if (corpusId in this.data.corpora) { + for (let eventListener of eventListeners) {eventListener('init', this.data.corpora[corpusId]);} + } + } + } + } + + if (Object.keys(this.data.jobs).length > 0) { + //for (listener of this.eventListeners.jobsInit) {listener(this.data.jobs);} + for (let [jobId, eventListeners] of Object.entries(this.eventListeners.job)) { + if (jobId === '*') { + for (let eventListener of eventListeners) {eventListener('init', this.data.jobs);} + } else { + if (jobId in this.data.jobs) { + for (let eventListener of eventListeners) {eventListener('init', this.data.jobs[jobId]);} + } + } + } + } + + if (Object.keys(this.data.query_results).length > 0) { + //for (listener of this.eventListeners.queryResultsInit) {listener(this.data.query_results);} + for (let [queryResultId, eventListeners] of Object.entries(this.eventListeners.queryResult)) { + if (queryResultId === '*') { + for (let eventListener of eventListeners) {eventListener('init', this.data.query_results);} + } else { + if (queryResultId in this.data.query_results) { + for (let eventListener of eventListeners) {eventListener('init', this.data.query_results[queryResultId]);} + } + } + } + } + } + + patch(patch) { + this.data = jsonpatch.apply_patch(this.data, patch); + + let corporaPatch = patch.filter(operation => operation.path.startsWith("/corpora")); + if (corporaPatch.length > 0) { + for (let [corpusId, eventListeners] of Object.entries(this.eventListeners.corpus)) { + if (corpusId === '*') { + for (let eventListener of eventListeners) {eventListener('patch', corporaPatch);} + } else { + let corpusPatch = corporaPatch.filter(operation => operation.path.startsWith(`/corpora/${corpusId}`)); + if (corpusPatch.length > 0) { + for (let eventListener of eventListeners) {eventListener('patch', corpusPatch);} + } + } + } + } + + let jobsPatch = patch.filter(operation => operation.path.startsWith("/jobs")); + if (jobsPatch.length > 0) { + for (let [jobId, eventListeners] of Object.entries(this.eventListeners.job)) { + if (jobId === '*') { + for (let eventListener of eventListeners) {eventListener('patch', jobsPatch);} + } else { + let jobPatch = jobsPatch.filter(operation => operation.path.startsWith(`/jobs/${jobId}`)); + if (jobPatch.length > 0) { + for (let eventListener of eventListeners) {eventListener('patch', jobPatch);} + } + } + } + } + + let queryResultsPatch = patch.filter(operation => operation.path.startsWith("/query_results")); + if (queryResultsPatch.length > 0) { + for (let [queryResultId, eventListeners] of Object.entries(this.eventListeners.queryResult)) { + if (queryResultId === '*') { + for (let eventListener of eventListeners) {eventListener('patch', queryResultsPatch);} + } else { + let queryResultPatch = queryResultsPatch.filter(operation => operation.path.startsWith(`/query_results/${queryResultId}`)); + if (queryResultPatch.length > 0) { + for (let eventListener of eventListeners) {eventListener('patch', queryResultPatch);} + } + } + } + } + + for (let operation of jobsPatch) { + if (operation.op !== 'replace') {continue;} + // Matches the only path that should be handled here: /jobs/{jobId}/status + if (/^\/jobs\/(\d+)\/status$/.test(operation.path)) { + 
+        let [match, jobId] = operation.path.match(/^\/jobs\/(\d+)\/status$/);
+        if (this.data.settings.job_status_site_notifications === "end" && !['complete', 'failed'].includes(operation.value)) {continue;}
+        nopaque.flash(`[${this.data.jobs[jobId].title}] New status: ${operation.value}`, 'job');
+      }
+    }
+  }
+}
+
+
+/*
+ * The nopaque object is used as a namespace for nopaque specific functions and
+ * variables.
+ */
+var nopaque = {};
+
+nopaque.flash = function(message, category) {
+  let toast;
+  let toastActionElement;
+
+  switch (category) {
+    case "corpus":
+      message = `book${message}`;
+      break;
+    case "error":
+      message = `error${message}`;
+      break;
+    case "job":
+      message = `work${message}`;
+      break;
+    default:
+      message = `notifications${message}`;
+  }
+
+  toast = M.toast({html: `${message}
+    `});
+  toastActionElement = toast.el.querySelector('.toast-action[data-action="close"]');
+  toastActionElement.addEventListener('click', () => {toast.dismiss();});
+};
+
+nopaque.Forms = {};
+nopaque.Forms.init = function() {
+  var abortRequestElement, parentElement, progressElement, progressModal,
+      progressModalElement, request, submitElement;
+
+  for (let form of document.querySelectorAll(".nopaque-submit-form")) {
+    submitElement = form.querySelector('button[type="submit"]');
+    submitElement.addEventListener("click", function() {
+      for (let selectElement of form.querySelectorAll('select')) {
+        if (selectElement.value === "") {
+          parentElement = selectElement.closest(".input-field");
+          parentElement.querySelector(".select-dropdown").classList.add("invalid");
+          for (let helperTextElement of parentElement.querySelectorAll(".helper-text")) {
+            helperTextElement.remove();
+          }
+          parentElement.insertAdjacentHTML("beforeend", `Please select an option.`);
+        }
+      }
+    });
+
+    request = new XMLHttpRequest();
+    if (form.dataset.hasOwnProperty("progressModal")) {
+      progressModalElement = document.getElementById(form.dataset.progressModal);
+      progressModal = M.Modal.getInstance(progressModalElement);
+      progressModal.options.dismissible = false;
+      abortRequestElement = progressModalElement.querySelector(".abort-request");
+      abortRequestElement.addEventListener("click", function() {request.abort();});
+      progressElement = progressModalElement.querySelector(".determinate");
+    }
+    form.addEventListener("submit", function(event) {
+      event.preventDefault();
+      var formData;
+
+      formData = new FormData(form);
+      // Initialize progress modal
+      if (progressModalElement) {
+        progressElement.style.width = "0%";
+        progressModal.open();
+      }
+      request.open("POST", window.location.href);
+      request.send(formData);
+    });
+    request.addEventListener("load", function(event) {
+      var fieldElement;
+
+      if (request.status === 201) {
+        window.location.href = JSON.parse(this.responseText).redirect_url;
+      }
+      if (request.status === 400) {
+        for (let [field, errors] of Object.entries(JSON.parse(this.responseText))) {
+          fieldElement = form.querySelector(`input[name$="${field}"]`).closest(".input-field");
+          for (let error of errors) {
+            fieldElement.insertAdjacentHTML("beforeend", `${error}`);
+          }
+        }
+        if (progressModalElement) {
+          progressModal.close();
+        }
+      }
+      if (request.status === 500) {
+        location.reload();
+      }
+    });
+    if (progressModalElement) {
+      request.upload.addEventListener("progress", function(event) {
+        progressElement.style.width = Math.floor(100 * event.loaded / event.total).toString() + "%";
+      });
+    }
+  }
+}
diff --git a/web/app/static/js/socket.io.min.js b/web/app/static/js/socket.io.min.js
new file mode 100644
index
00000000..cec3c9ca --- /dev/null +++ b/web/app/static/js/socket.io.min.js @@ -0,0 +1,7 @@ +/*! + * Socket.IO v3.0.4 + * (c) 2014-2020 Guillermo Rauch + * Released under the MIT License. + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.io=e():t.io=e()}("undefined"!=typeof self?self:"undefined"!=typeof window?window:"undefined"!=typeof global?global:Function("return this")(),(function(){return function(t){var e={};function n(r){if(e[r])return e[r].exports;var o=e[r]={i:r,l:!1,exports:{}};return t[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=t,n.c=e,n.d=function(t,e,r){n.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:r})},n.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},n.t=function(t,e){if(1&e&&(t=n(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var o in t)n.d(r,o,function(e){return t[e]}.bind(null,o));return r},n.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},n.p="",n(n.s=18)}([function(t,e,n){function r(t){if(t)return function(t){for(var e in r.prototype)t[e]=r.prototype[e];return t}(t)}t.exports=r,r.prototype.on=r.prototype.addEventListener=function(t,e){return this._callbacks=this._callbacks||{},(this._callbacks["$"+t]=this._callbacks["$"+t]||[]).push(e),this},r.prototype.once=function(t,e){function n(){this.off(t,n),e.apply(this,arguments)}return n.fn=e,this.on(t,n),this},r.prototype.off=r.prototype.removeListener=r.prototype.removeAllListeners=r.prototype.removeEventListener=function(t,e){if(this._callbacks=this._callbacks||{},0==arguments.length)return this._callbacks={},this;var n,r=this._callbacks["$"+t];if(!r)return this;if(1==arguments.length)return delete this._callbacks["$"+t],this;for(var o=0;o=this._reconnectionAttempts)this.backoff.reset(),i(u(b.prototype),"emit",this).call(this,"reconnect_failed"),this._reconnecting=!1;else{var n=this.backoff.duration();this._reconnecting=!0;var r=setTimeout((function(){e.skipReconnect||(i(u(b.prototype),"emit",t).call(t,"reconnect_attempt",e.backoff.attempts),e.skipReconnect||e.open((function(n){n?(e._reconnecting=!1,e.reconnect(),i(u(b.prototype),"emit",t).call(t,"reconnect_error",n)):e.onreconnect()})))}),n);this.subs.push({destroy:function(){clearTimeout(r)}})}}},{key:"onreconnect",value:function(){var t=this.backoff.attempts;this._reconnecting=!1,this.backoff.reset(),i(u(b.prototype),"emit",this).call(this,"reconnect",t)}}])&&o(e.prototype,n),a&&o(e,a),b}(l);e.Manager=b},function(t,e,n){var r=n(9),o=n(23),i=n(27),s=n(28);e.polling=function(t){var e=!1,n=!1,s=!1!==t.jsonp;if("undefined"!=typeof location){var c="https:"===location.protocol,a=location.port;a||(a=c?443:80),e=t.hostname!==location.hostname||a!==t.port,n=t.secure!==c}if(t.xdomain=e,t.xscheme=n,"open"in new r(t)&&!t.forceJSONP)return new o(t);if(!s)throw new Error("JSONP disabled");return new i(t)},e.websocket=s},function(t,e,n){var r=n(22),o=n(2);t.exports=function(t){var e=t.xdomain,n=t.xscheme,i=t.enablesXDR;try{if("undefined"!=typeof XMLHttpRequest&&(!e||r))return new XMLHttpRequest}catch(t){}try{if("undefined"!=typeof 
XDomainRequest&&!n&&i)return new XDomainRequest}catch(t){}if(!e)try{return new(o[["Active"].concat("Object").join("X")])("Microsoft.XMLHTTP")}catch(t){}}},function(t,e,n){function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function i(t,e){for(var n=0;n0);return e}function u(){var t=a(+new Date);return t!==r?(s=0,r=t):t+"."+a(s++)}for(;c<64;c++)i[o[c]]=c;u.encode=a,u.decode=function(t){var e=0;for(c=0;c1?e-1:0),r=1;r=t.length?{done:!0}:{done:!1,value:t[r++]}},e:function(t){throw t},f:o}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var s,c=!0,a=!1;return{s:function(){n=t[Symbol.iterator]()},n:function(){var t=n.next();return c=t.done,t},e:function(t){a=!0,s=t},f:function(){try{c||null==n.return||n.return()}finally{if(a)throw s}}}}function i(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n1?e-1:0),r=1;r1&&void 0!==arguments[1]?arguments[1]:{};return i(this,l),e=f.call(this),t&&"object"===o(t)&&(n=t,t=null),t?(t=y(t),n.hostname=t.host,n.secure="https"===t.protocol||"wss"===t.protocol,n.port=t.port,t.query&&(n.query=t.query)):n.host&&(n.hostname=y(n.host).host),e.secure=null!=n.secure?n.secure:"undefined"!=typeof location&&"https:"===location.protocol,n.hostname&&!n.port&&(n.port=e.secure?"443":"80"),e.hostname=n.hostname||("undefined"!=typeof location?location.hostname:"localhost"),e.port=n.port||("undefined"!=typeof location&&location.port?location.port:e.secure?443:80),e.transports=n.transports||["polling","websocket"],e.readyState="",e.writeBuffer=[],e.prevBufferLen=0,e.opts=r({path:"/engine.io",agent:!1,withCredentials:!1,upgrade:!0,jsonp:!0,timestampParam:"t",rememberUpgrade:!1,rejectUnauthorized:!0,perMessageDeflate:{threshold:1024},transportOptions:{}},n),e.opts.path=e.opts.path.replace(/\/$/,"")+"/","string"==typeof e.opts.query&&(e.opts.query=d.decode(e.opts.query)),e.id=null,e.upgrades=null,e.pingInterval=null,e.pingTimeout=null,e.pingTimeoutTimer=null,e.open(),e}return e=l,(n=[{key:"createTransport",value:function(t){var e=function(t){var e={};for(var n in t)t.hasOwnProperty(n)&&(e[n]=t[n]);return e}(this.opts.query);e.EIO=h.protocol,e.transport=t,this.id&&(e.sid=this.id);var n=r({},this.opts.transportOptions[t],this.opts,{query:e,socket:this,hostname:this.hostname,secure:this.secure,port:this.port});return new p[t](n)}},{key:"open",value:function(){var t;if(this.opts.rememberUpgrade&&l.priorWebsocketSuccess&&-1!==this.transports.indexOf("websocket"))t="websocket";else{if(0===this.transports.length){var e=this;return void setTimeout((function(){e.emit("error","No transports available")}),0)}t=this.transports[0]}this.readyState="opening";try{t=this.createTransport(t)}catch(t){return this.transports.shift(),void this.open()}t.open(),this.setTransport(t)}},{key:"setTransport",value:function(t){var e=this;this.transport&&this.transport.removeAllListeners(),this.transport=t,t.on("drain",(function(){e.onDrain()})).on("packet",(function(t){e.onPacket(t)})).on("error",(function(t){e.onError(t)})).on("close",(function(){e.onClose("transport close")}))}},{key:"probe",value:function(t){var e=this.createTransport(t,{probe:1}),n=!1,r=this;function o(){if(r.onlyBinaryUpgrades){var 
t=!this.supportsBinary&&r.transport.supportsBinary;n=n||t}n||(e.send([{type:"ping",data:"probe"}]),e.once("packet",(function(t){if(!n)if("pong"===t.type&&"probe"===t.data){if(r.upgrading=!0,r.emit("upgrading",e),!e)return;l.priorWebsocketSuccess="websocket"===e.name,r.transport.pause((function(){n||"closed"!==r.readyState&&(f(),r.setTransport(e),e.send([{type:"upgrade"}]),r.emit("upgrade",e),e=null,r.upgrading=!1,r.flush())}))}else{var o=new Error("probe error");o.transport=e.name,r.emit("upgradeError",o)}})))}function i(){n||(n=!0,f(),e.close(),e=null)}function s(t){var n=new Error("probe error: "+t);n.transport=e.name,i(),r.emit("upgradeError",n)}function c(){s("transport closed")}function a(){s("socket closed")}function u(t){e&&t.name!==e.name&&i()}function f(){e.removeListener("open",o),e.removeListener("error",s),e.removeListener("close",c),r.removeListener("close",a),r.removeListener("upgrading",u)}l.priorWebsocketSuccess=!1,e.once("open",o),e.once("error",s),e.once("close",c),this.once("close",a),this.once("upgrading",u),e.open()}},{key:"onOpen",value:function(){if(this.readyState="open",l.priorWebsocketSuccess="websocket"===this.transport.name,this.emit("open"),this.flush(),"open"===this.readyState&&this.opts.upgrade&&this.transport.pause)for(var t=0,e=this.upgrades.length;t0&&void 0!==arguments[0]?arguments[0]:{};return o(t,{xd:this.xd,xs:this.xs},this.opts),new w(this.uri(),t)}},{key:"doWrite",value:function(t,e){var n=this.request({method:"POST",data:t}),r=this;n.on("success",e),n.on("error",(function(t){r.onError("xhr post error",t)}))}},{key:"doPoll",value:function(){var t=this.request(),e=this;t.on("data",(function(t){e.onData(t)})),t.on("error",(function(t){e.onError("xhr poll error",t)})),this.pollXhr=t}}]),n}(y),w=function(t){a(n,t);var e=f(n);function n(t,r){var o;return i(this,n),(o=e.call(this)).opts=r,o.method=r.method||"GET",o.uri=t,o.async=!1!==r.async,o.data=void 0!==r.data?r.data:null,o.create(),o}return c(n,[{key:"create",value:function(){var t=v(this.opts,"agent","enablesXDR","pfx","key","passphrase","cert","ca","ciphers","rejectUnauthorized");t.xdomain=!!this.opts.xd,t.xscheme=!!this.opts.xs;var e=this.xhr=new h(t),r=this;try{e.open(this.method,this.uri,this.async);try{if(this.opts.extraHeaders)for(var o in e.setDisableHeaderCheck&&e.setDisableHeaderCheck(!0),this.opts.extraHeaders)this.opts.extraHeaders.hasOwnProperty(o)&&e.setRequestHeader(o,this.opts.extraHeaders[o])}catch(t){}if("POST"===this.method)try{e.setRequestHeader("Content-type","text/plain;charset=UTF-8")}catch(t){}try{e.setRequestHeader("Accept","*/*")}catch(t){}"withCredentials"in e&&(e.withCredentials=this.opts.withCredentials),this.opts.requestTimeout&&(e.timeout=this.opts.requestTimeout),this.hasXDR()?(e.onload=function(){r.onLoad()},e.onerror=function(){r.onError(e.responseText)}):e.onreadystatechange=function(){4===e.readyState&&(200===e.status||1223===e.status?r.onLoad():setTimeout((function(){r.onError("number"==typeof e.status?e.status:0)}),0))},e.send(this.data)}catch(t){return void setTimeout((function(){r.onError(t)}),0)}"undefined"!=typeof document&&(this.index=n.requestsCount++,n.requests[this.index]=this)}},{key:"onSuccess",value:function(){this.emit("success"),this.cleanup()}},{key:"onData",value:function(t){this.emit("data",t),this.onSuccess()}},{key:"onError",value:function(t){this.emit("error",t),this.cleanup(!0)}},{key:"cleanup",value:function(t){if(void 
0!==this.xhr&&null!==this.xhr){if(this.hasXDR()?this.xhr.onload=this.xhr.onerror=m:this.xhr.onreadystatechange=m,t)try{this.xhr.abort()}catch(t){}"undefined"!=typeof document&&delete n.requests[this.index],this.xhr=null}}},{key:"onLoad",value:function(){var t=this.xhr.responseText;null!==t&&this.onData(t)}},{key:"hasXDR",value:function(){return"undefined"!=typeof XDomainRequest&&!this.xs&&this.enablesXDR}},{key:"abort",value:function(){this.cleanup()}}]),n}(d);if(w.requestsCount=0,w.requests={},"undefined"!=typeof document)if("function"==typeof attachEvent)attachEvent("onunload",_);else if("function"==typeof addEventListener){addEventListener("onpagehide"in b?"pagehide":"unload",_,!1)}function _(){for(var t in w.requests)w.requests.hasOwnProperty(t)&&w.requests[t].abort()}t.exports=k,t.exports.Request=w},function(t,e,n){var r=n(11).PACKET_TYPES,o="function"==typeof Blob||"undefined"!=typeof Blob&&"[object BlobConstructor]"===Object.prototype.toString.call(Blob),i="function"==typeof ArrayBuffer,s=function(t,e){var n=new FileReader;return n.onload=function(){var t=n.result.split(",")[1];e("b"+t)},n.readAsDataURL(t)};t.exports=function(t,e,n){var c,a=t.type,u=t.data;return o&&u instanceof Blob?e?n(u):s(u,n):i&&(u instanceof ArrayBuffer||(c=u,"function"==typeof ArrayBuffer.isView?ArrayBuffer.isView(c):c&&c.buffer instanceof ArrayBuffer))?e?n(u instanceof ArrayBuffer?u:u.buffer):s(new Blob([u]),n):n(r[a]+(u||""))}},function(t,e,n){var r,o=n(11),i=o.PACKET_TYPES_REVERSE,s=o.ERROR_PACKET;"function"==typeof ArrayBuffer&&(r=n(26));var c=function(t,e){if(r){var n=r.decode(t);return a(n,e)}return{base64:!0,data:t}},a=function(t,e){switch(e){case"blob":return t instanceof ArrayBuffer?new Blob([t]):t;case"arraybuffer":default:return t}};t.exports=function(t,e){if("string"!=typeof t)return{type:"message",data:a(t,e)};var n=t.charAt(0);return"b"===n?{type:"message",data:c(t.substring(1),e)}:i[n]?t.length>1?{type:i[n],data:t.substring(1)}:{type:i[n]}:s}},function(t,e){!function(){"use strict";for(var t="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",n=new Uint8Array(256),r=0;r>2],i+=t[(3&r[n])<<4|r[n+1]>>4],i+=t[(15&r[n+1])<<2|r[n+2]>>6],i+=t[63&r[n+2]];return o%3==2?i=i.substring(0,i.length-1)+"=":o%3==1&&(i=i.substring(0,i.length-2)+"=="),i},e.decode=function(t){var e,r,o,i,s,c=.75*t.length,a=t.length,u=0;"="===t[t.length-1]&&(c--,"="===t[t.length-2]&&c--);var f=new ArrayBuffer(c),p=new Uint8Array(f);for(e=0;e>4,p[u++]=(15&o)<<4|i>>2,p[u++]=(3&i)<<6|63&s;return f}}()},function(t,e,n){function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){for(var n=0;n';n=document.createElement(t)}catch(t){(n=document.createElement("iframe")).name=r.iframeId,n.src="javascript:0"}n.id=r.iframeId,r.form.appendChild(n),r.iframe=n}this.form.action=this.uri(),a(),t=t.replace(d,"\\\n"),this.area.value=t.replace(y,"\\n");try{this.form.submit()}catch(t){}this.iframe.attachEvent?this.iframe.onreadystatechange=function(){"complete"===r.iframe.readyState&&c()}:this.iframe.onload=c}},{key:"supportsBinary",get:function(){return!1}}])&&o(e.prototype,n),r&&o(e,r),l}(l);t.exports=b},function(t,e,n){function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function 
o(t,e){for(var n=0;n0&&t.jitter<=1?t.jitter:0,this.attempts=0}t.exports=n,n.prototype.duration=function(){var t=this.ms*Math.pow(this.factor,this.attempts++);if(this.jitter){var e=Math.random(),n=Math.floor(e*this.jitter*t);t=0==(1&Math.floor(10*e))?t-n:t+n}return 0|Math.min(t,this.max)},n.prototype.reset=function(){this.attempts=0},n.prototype.setMin=function(t){this.ms=t},n.prototype.setMax=function(t){this.max=t},n.prototype.setJitter=function(t){this.jitter=t}}])})); +//# sourceMappingURL=socket.io.min.js.map \ No newline at end of file diff --git a/web/app/static/js/socket.io.min.js.map b/web/app/static/js/socket.io.min.js.map new file mode 100644 index 00000000..80c088a3 --- /dev/null +++ b/web/app/static/js/socket.io.min.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["webpack://io/webpack/universalModuleDefinition","webpack://io/webpack/bootstrap","webpack://io/./node_modules/component-emitter/index.js","webpack://io/./node_modules/engine.io-parser/lib/index.js","webpack://io/./node_modules/engine.io-client/lib/globalThis.browser.js","webpack://io/./node_modules/engine.io-client/lib/transport.js","webpack://io/./node_modules/parseqs/index.js","webpack://io/./node_modules/socket.io-parser/dist/index.js","webpack://io/./node_modules/parseuri/index.js","webpack://io/./build/manager.js","webpack://io/./node_modules/engine.io-client/lib/transports/index.js","webpack://io/./node_modules/engine.io-client/lib/xmlhttprequest.js","webpack://io/./node_modules/engine.io-client/lib/transports/polling.js","webpack://io/./node_modules/engine.io-parser/lib/commons.js","webpack://io/./node_modules/yeast/index.js","webpack://io/./node_modules/engine.io-client/lib/util.js","webpack://io/./build/socket.js","webpack://io/./node_modules/socket.io-parser/dist/is-binary.js","webpack://io/./build/on.js","webpack://io/./node_modules/component-bind/index.js","webpack://io/./build/index.js","webpack://io/./build/url.js","webpack://io/./node_modules/engine.io-client/lib/index.js","webpack://io/./node_modules/engine.io-client/lib/socket.js","webpack://io/./node_modules/has-cors/index.js","webpack://io/./node_modules/engine.io-client/lib/transports/polling-xhr.js","webpack://io/./node_modules/engine.io-parser/lib/encodePacket.browser.js","webpack://io/./node_modules/engine.io-parser/lib/decodePacket.browser.js","webpack://io/./node_modules/base64-arraybuffer/lib/base64-arraybuffer.js","webpack://io/./node_modules/engine.io-client/lib/transports/polling-jsonp.js","webpack://io/./node_modules/engine.io-client/lib/transports/websocket.js","webpack://io/./node_modules/engine.io-client/lib/transports/websocket-constructor.browser.js","webpack://io/./node_modules/socket.io-parser/dist/binary.js","webpack://io/./node_modules/backo2/index.js"],"names":["root","factory","exports","module","define","amd","self","window","global","Function","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","Emitter","obj","mixin","on","addEventListener","event","fn","this","_callbacks","push","once","off","apply","arguments","removeListener","removeAllListeners","removeEventListener","length","cb","callbacks","splice","emit","args","Array","len","slice","listeners","hasListeners","encodePacket","require","decodePacket","SEPARATOR","String","fromCharCode","protocol","encodePayload","packets","call
back","encodedPackets","count","forEach","packet","encodedPacket","join","decodePayload","encodedPayload","binaryType","split","decodedPacket","type","parser","Transport","opts","query","readyState","socket","msg","desc","err","Error","description","doOpen","doClose","onClose","write","writable","data","onPacket","encode","str","encodeURIComponent","decode","qs","qry","pairs","pair","decodeURIComponent","Decoder","Encoder","PacketType","binary_1","is_binary_1","EVENT","ACK","hasBinary","encodeAsString","BINARY_EVENT","BINARY_ACK","encodeAsBinary","attachments","nsp","id","JSON","stringify","deconstruction","deconstructPacket","pack","buffers","unshift","decodeString","reconstructor","BinaryReconstructor","isBinary","base64","takeBinaryData","Number","charAt","undefined","start","buf","substring","next","payload","parse","e","tryParse","substr","isPayloadValid","finishedReconstruction","CONNECT","DISCONNECT","CONNECT_ERROR","isArray","reconPack","binData","reconstructPacket","re","parts","src","b","indexOf","replace","exec","uri","source","host","authority","ipv6uri","pathNames","path","names","queryKey","$0","$1","$2","Manager","eio","socket_1","on_1","Backoff","nsps","subs","reconnection","reconnectionAttempts","Infinity","reconnectionDelay","reconnectionDelayMax","randomizationFactor","backoff","min","max","jitter","timeout","_readyState","_parser","encoder","decoder","_autoConnect","autoConnect","open","v","_reconnection","_reconnectionAttempts","_a","_reconnectionDelay","setMin","_randomizationFactor","setJitter","_reconnectionDelayMax","setMax","_timeout","_reconnecting","attempts","reconnect","engine","skipReconnect","openSub","onopen","errorSub","cleanup","maybeReconnectOnOpen","destroy","timer","setTimeout","close","clearTimeout","add","Socket","keys","active","_close","options","subsLength","shift","reset","reason","delay","duration","onreconnect","attempt","XMLHttpRequest","XHR","JSONP","websocket","polling","xd","xs","jsonp","location","isSSL","port","hostname","secure","xdomain","xscheme","forceJSONP","hasCORS","globalThis","enablesXDR","XDomainRequest","concat","parseqs","yeast","Polling","poll","onPause","pause","total","doPoll","index","onOpen","doWrite","schema","timestampRequests","timestampParam","supportsBinary","sid","b64","PACKET_TYPES","PACKET_TYPES_REVERSE","ERROR_PACKET","prev","alphabet","map","seed","num","encoded","Math","floor","now","Date","decoded","pick","attr","reduce","acc","k","socket_io_parser_1","RESERVED_EVENTS","freeze","connect","connect_error","disconnect","disconnecting","newListener","io","ids","acks","receiveBuffer","sendBuffer","flags","connected","disconnected","auth","subEvents","ev","compress","pop","isTransportWritable","transport","discardPacket","_packet","onconnect","onevent","onack","ondisconnect","message","ack","emitEvent","_anyListeners","sent","emitBuffered","onclose","listener","withNativeArrayBuffer","ArrayBuffer","toString","withNativeBlob","Blob","withNativeFile","File","isView","buffer","toJSON","url_1","manager_1","lookup","cache","managers","parsed","url","sameNamespace","forceNew","multiplex","manager_2","parseuri","loc","test","href","transports","writeBuffer","prevBufferLen","agent","withCredentials","upgrade","rememberUpgrade","rejectUnauthorized","perMessageDeflate","threshold","transportOptions","upgrades","pingInterval","pingTimeout","pingTimeoutTimer","clone","EIO","priorWebsocketSuccess","createTransport","setTransport","onDrain","onError","probe","failed","onTransportOpen","onlyBinaryUpgrades","upgradeLosesBinary","sen
d","upgrading","flush","freezeTransport","onerror","error","onTransportClose","onupgrade","to","onHandshake","resetPingTimeout","sendPacket","code","filterUpgrades","cleanupAndClose","waitForUpgrade","pingIntervalTimer","filteredUpgrades","j","empty","hasXHR2","responseType","forceBase64","Request","req","request","method","onData","pollXhr","async","xhr","extraHeaders","setDisableHeaderCheck","setRequestHeader","requestTimeout","hasXDR","onload","onLoad","responseText","onreadystatechange","status","document","requestsCount","requests","onSuccess","fromError","abort","attachEvent","unloadHandler","encodeBlobAsBase64","fileReader","FileReader","content","result","readAsDataURL","base64decoder","decodeBase64Packet","mapBinary","chars","Uint8Array","charCodeAt","arraybuffer","bytes","encoded1","encoded2","encoded3","encoded4","bufferLength","rNewline","rEscapedNewline","JSONPPolling","___eio","script","parentNode","removeChild","form","iframe","createElement","insertAt","getElementsByTagName","insertBefore","head","body","appendChild","navigator","userAgent","area","iframeId","className","style","position","top","left","target","setAttribute","complete","initIframe","html","action","submit","WebSocket","usingBrowserWebSocket","defaultBinaryType","isReactNative","product","toLowerCase","WS","check","protocols","headers","ws","addEventListeners","onmessage","Buffer","byteLength","MozWebSocket","packetData","_deconstructPacket","placeholder","_placeholder","newData","_reconstructPacket","ms","factor","pow","rand","random","deviation"],"mappings":";;;;;CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,GAAIH,GACe,iBAAZC,QACdA,QAAY,GAAID,IAEhBD,EAAS,GAAIC,IARf,CAU0B,oBAATK,KACAA,KACkB,oBAAXC,OACPA,OACkB,oBAAXC,OACPA,OAEAC,SAAS,cAATA,IAEP,WACV,O,YCnBE,IAAIC,EAAmB,GAGvB,SAASC,EAAoBC,GAG5B,GAAGF,EAAiBE,GACnB,OAAOF,EAAiBE,GAAUV,QAGnC,IAAIC,EAASO,EAAiBE,GAAY,CACzCC,EAAGD,EACHE,GAAG,EACHZ,QAAS,IAUV,OANAa,EAAQH,GAAUI,KAAKb,EAAOD,QAASC,EAAQA,EAAOD,QAASS,GAG/DR,EAAOW,GAAI,EAGJX,EAAOD,QA0Df,OArDAS,EAAoBM,EAAIF,EAGxBJ,EAAoBO,EAAIR,EAGxBC,EAAoBQ,EAAI,SAASjB,EAASkB,EAAMC,GAC3CV,EAAoBW,EAAEpB,EAASkB,IAClCG,OAAOC,eAAetB,EAASkB,EAAM,CAAEK,YAAY,EAAMC,IAAKL,KAKhEV,EAAoBgB,EAAI,SAASzB,GACX,oBAAX0B,QAA0BA,OAAOC,aAC1CN,OAAOC,eAAetB,EAAS0B,OAAOC,YAAa,CAAEC,MAAO,WAE7DP,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,KAQvDnB,EAAoBoB,EAAI,SAASD,EAAOE,GAEvC,GADU,EAAPA,IAAUF,EAAQnB,EAAoBmB,IAC/B,EAAPE,EAAU,OAAOF,EACpB,GAAW,EAAPE,GAA8B,iBAAVF,GAAsBA,GAASA,EAAMG,WAAY,OAAOH,EAChF,IAAII,EAAKX,OAAOY,OAAO,MAGvB,GAFAxB,EAAoBgB,EAAEO,GACtBX,OAAOC,eAAeU,EAAI,UAAW,CAAET,YAAY,EAAMK,MAAOA,IACtD,EAAPE,GAA4B,iBAATF,EAAmB,IAAI,IAAIM,KAAON,EAAOnB,EAAoBQ,EAAEe,EAAIE,EAAK,SAASA,GAAO,OAAON,EAAMM,IAAQC,KAAK,KAAMD,IAC9I,OAAOF,GAIRvB,EAAoB2B,EAAI,SAASnC,GAChC,IAAIkB,EAASlB,GAAUA,EAAO8B,WAC7B,WAAwB,OAAO9B,EAAgB,SAC/C,WAA8B,OAAOA,GAEtC,OADAQ,EAAoBQ,EAAEE,EAAQ,IAAKA,GAC5BA,GAIRV,EAAoBW,EAAI,SAASiB,EAAQC,GAAY,OAAOjB,OAAOkB,UAAUC,eAAe1B,KAAKuB,EAAQC,IAGzG7B,EAAoBgC,EAAI,GAIjBhC,EAAoBA,EAAoBiC,EAAI,I,kBCnErD,SAASC,EAAQC,GACf,GAAIA,EAAK,OAWX,SAAeA,GACb,IAAK,IAAIV,KAAOS,EAAQJ,UACtBK,EAAIV,GAAOS,EAAQJ,UAAUL,GAE/B,OAAOU,EAfSC,CAAMD,GAVtB3C,EAAOD,QAAU2C,EAqCnBA,EAAQJ,UAAUO,GAClBH,EAAQJ,UAAUQ,iBAAmB,SAASC,EAAOC,GAInD,OAHAC,KAAKC,WAAaD,KAAKC,YAAc,IACpCD,KAAKC,WAAW,IAAMH,GAASE,KAAKC,WAAW,IAAMH,IAAU,IAC7DI,KAAKH,GACDC,MAaTP,EAAQJ,UAAUc,KAAO,SAASL,EAAOC,GACvC,SAASH,IACPI,KAAKI,IAAIN,EAAOF,GAChBG,EAAGM,MAAML,KAAMM,WAKjB,OAFAV,EAAGG,GAAKA,EACRC,KAAKJ,GAAGE,EAAOF,GACRI,MAaTP,EAAQJ,UAAUe,IAClBX,EAAQJ,UAAUkB,eAClBd,EAAQJ,UAAUmB,mB
AClBf,EAAQJ,UAAUoB,oBAAsB,SAASX,EAAOC,GAItD,GAHAC,KAAKC,WAAaD,KAAKC,YAAc,GAGjC,GAAKK,UAAUI,OAEjB,OADAV,KAAKC,WAAa,GACXD,KAIT,IAUIW,EAVAC,EAAYZ,KAAKC,WAAW,IAAMH,GACtC,IAAKc,EAAW,OAAOZ,KAGvB,GAAI,GAAKM,UAAUI,OAEjB,cADOV,KAAKC,WAAW,IAAMH,GACtBE,KAKT,IAAK,IAAIvC,EAAI,EAAGA,EAAImD,EAAUF,OAAQjD,IAEpC,IADAkD,EAAKC,EAAUnD,MACJsC,GAAMY,EAAGZ,KAAOA,EAAI,CAC7Ba,EAAUC,OAAOpD,EAAG,GACpB,MAUJ,OAJyB,IAArBmD,EAAUF,eACLV,KAAKC,WAAW,IAAMH,GAGxBE,MAWTP,EAAQJ,UAAUyB,KAAO,SAAShB,GAChCE,KAAKC,WAAaD,KAAKC,YAAc,GAKrC,IAHA,IAAIc,EAAO,IAAIC,MAAMV,UAAUI,OAAS,GACpCE,EAAYZ,KAAKC,WAAW,IAAMH,GAE7BrC,EAAI,EAAGA,EAAI6C,UAAUI,OAAQjD,IACpCsD,EAAKtD,EAAI,GAAK6C,UAAU7C,GAG1B,GAAImD,EAEG,CAAInD,EAAI,EAAb,IAAK,IAAWwD,GADhBL,EAAYA,EAAUM,MAAM,IACIR,OAAQjD,EAAIwD,IAAOxD,EACjDmD,EAAUnD,GAAG4C,MAAML,KAAMe,GAI7B,OAAOf,MAWTP,EAAQJ,UAAU8B,UAAY,SAASrB,GAErC,OADAE,KAAKC,WAAaD,KAAKC,YAAc,GAC9BD,KAAKC,WAAW,IAAMH,IAAU,IAWzCL,EAAQJ,UAAU+B,aAAe,SAAStB,GACxC,QAAUE,KAAKmB,UAAUrB,GAAOY,S,gBC7KlC,IAAMW,EAAeC,EAAQ,IACvBC,EAAeD,EAAQ,IAEvBE,EAAYC,OAAOC,aAAa,IAgCtC3E,EAAOD,QAAU,CACf6E,SAAU,EACVN,eACAO,cAjCoB,SAACC,EAASC,GAE9B,IAAMpB,EAASmB,EAAQnB,OACjBqB,EAAiB,IAAIf,MAAMN,GAC7BsB,EAAQ,EAEZH,EAAQI,SAAQ,SAACC,EAAQzE,GAEvB4D,EAAaa,GAAQ,GAAO,SAAAC,GAC1BJ,EAAetE,GAAK0E,IACdH,IAAUtB,GACdoB,EAASC,EAAeK,KAAKZ,WAuBnCD,eACAc,cAlBoB,SAACC,EAAgBC,GAGrC,IAFA,IAAMR,EAAiBO,EAAeE,MAAMhB,GACtCK,EAAU,GACPpE,EAAI,EAAGA,EAAIsE,EAAerB,OAAQjD,IAAK,CAC9C,IAAMgF,EAAgBlB,EAAaQ,EAAetE,GAAI8E,GAEtD,GADAV,EAAQ3B,KAAKuC,GACc,UAAvBA,EAAcC,KAChB,MAGJ,OAAOb,K,cChCT9E,EAAOD,QACe,oBAATI,KACFA,KACoB,oBAAXC,OACTA,OAEAE,SAAS,cAATA,I,ytCCNX,IAAMsF,EAASrB,EAAQ,GAGjBsB,E,sQAOJ,WAAYC,GAAM,a,4FAAA,UAChB,gBAEKA,KAAOA,EACZ,EAAKC,MAAQD,EAAKC,MAClB,EAAKC,WAAa,GAClB,EAAKC,OAASH,EAAKG,OANH,E,6CAgBVC,EAAKC,GACX,IAAMC,EAAM,IAAIC,MAAMH,GAItB,OAHAE,EAAIT,KAAO,iBACXS,EAAIE,YAAcH,EAClBlD,KAAKc,KAAK,QAASqC,GACZnD,O,6BAcP,MALI,WAAaA,KAAK+C,YAAc,KAAO/C,KAAK+C,aAC9C/C,KAAK+C,WAAa,UAClB/C,KAAKsD,UAGAtD,O,8BAcP,MALI,YAAcA,KAAK+C,YAAc,SAAW/C,KAAK+C,aACnD/C,KAAKuD,UACLvD,KAAKwD,WAGAxD,O,2BASJ6B,GACH,GAAI,SAAW7B,KAAK+C,WAGlB,MAAM,IAAIK,MAAM,sBAFhBpD,KAAKyD,MAAM5B,K,+BAYb7B,KAAK+C,WAAa,OAClB/C,KAAK0D,UAAW,EAChB1D,KAAKc,KAAK,U,6BASL6C,GACL,IAAMzB,EAASS,EAAOpB,aAAaoC,EAAM3D,KAAKgD,OAAOT,YACrDvC,KAAK4D,SAAS1B,K,+BAMPA,GACPlC,KAAKc,KAAK,SAAUoB,K,gCASpBlC,KAAK+C,WAAa,SAClB/C,KAAKc,KAAK,c,8BA/GEQ,EAAQ,IAmHxBvE,EAAOD,QAAU8F,G,cC5GjB9F,EAAQ+G,OAAS,SAAUnE,GACzB,IAAIoE,EAAM,GAEV,IAAK,IAAIrG,KAAKiC,EACRA,EAAIJ,eAAe7B,KACjBqG,EAAIpD,SAAQoD,GAAO,KACvBA,GAAOC,mBAAmBtG,GAAK,IAAMsG,mBAAmBrE,EAAIjC,KAIhE,OAAOqG,GAUThH,EAAQkH,OAAS,SAASC,GAGxB,IAFA,IAAIC,EAAM,GACNC,EAAQF,EAAGzB,MAAM,KACZ/E,EAAI,EAAGC,EAAIyG,EAAMzD,OAAQjD,EAAIC,EAAGD,IAAK,CAC5C,IAAI2G,EAAOD,EAAM1G,GAAG+E,MAAM,KAC1B0B,EAAIG,mBAAmBD,EAAK,KAAOC,mBAAmBD,EAAK,IAE7D,OAAOF,I,oqDClCT/F,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,IACtD5B,EAAQwH,QAAUxH,EAAQyH,QAAUzH,EAAQ0H,WAAa1H,EAAQ6E,cAAW,EAC5E,IAWI6C,EAXE/E,EAAU6B,EAAQ,GAClBmD,EAAWnD,EAAQ,IACnBoD,EAAcpD,EAAQ,IAQ5BxE,EAAQ6E,SAAW,EAEnB,SAAW6C,GACPA,EAAWA,EAAU,QAAc,GAAK,UACxCA,EAAWA,EAAU,WAAiB,GAAK,aAC3CA,EAAWA,EAAU,MAAY,GAAK,QACtCA,EAAWA,EAAU,IAAU,GAAK,MACpCA,EAAWA,EAAU,cAAoB,GAAK,gBAC9CA,EAAWA,EAAU,aAAmB,GAAK,eAC7CA,EAAWA,EAAU,WAAiB,GAAK,aAP/C,CAQGA,EAAa1H,EAAQ0H,aAAe1H,EAAQ0H,WAAa,K,IAItDD,E,2EAOK7E,GAGH,OAAIA,EAAIgD,OAAS8B,EAAWG,OAASjF,EAAIgD,OAAS8B,EAAWI,MACrDF,EAAYG,UAAUnF,GAQvB,CAACM,KAAK8E,eAAepF,KAPpBA,EAAIgD,KACAhD,EAAIgD,OAAS8B,EAAWG,MAClBH,EAAWO,aACXP,EAAWQ,WACdhF,KAAKiF,eAAevF,M,qCAQxBA,GAEX,IAAIoE,EAAM,GAAKpE,EAAIgD,KAqBnB,OAnBIhD,EAAIgD,OAAS8B,EAAWO,cACxBrF,EAAIgD,OAAS8B,EAAWQ,aACxBlB,GAAOpE,EAAIwF,YAAc,KAIzBxF,EAAIyF,K
AAO,MAAQzF,EAAIyF,MACvBrB,GAAOpE,EAAIyF,IAAM,KAGjB,MAAQzF,EAAI0F,KACZtB,GAAOpE,EAAI0F,IAGX,MAAQ1F,EAAIiE,OACZG,GAAOuB,KAAKC,UAAU5F,EAAIiE,OAIvBG,I,qCAOIpE,GACX,IAAM6F,EAAiBd,EAASe,kBAAkB9F,GAC5C+F,EAAOzF,KAAK8E,eAAeS,EAAerD,QAC1CwD,EAAUH,EAAeG,QAE/B,OADAA,EAAQC,QAAQF,GACTC,M,KAGf5I,EAAQyH,QAAUA,E,IAMZD,E,gQACF,aAAc,8B,sCAQV5E,GACA,IAAIwC,EACJ,GAAmB,iBAARxC,GACPwC,EAASlC,KAAK4F,aAAalG,IAChBgD,OAAS8B,EAAWO,cAC3B7C,EAAOQ,OAAS8B,EAAWQ,YAE3BhF,KAAK6F,cAAgB,IAAIC,EAAoB5D,GAElB,IAAvBA,EAAOgD,aACP,wCAAW,UAAWhD,IAK1B,wCAAW,UAAWA,OAGzB,KAAIwC,EAAYqB,SAASrG,KAAQA,EAAIsG,OAetC,MAAM,IAAI5C,MAAM,iBAAmB1D,GAbnC,IAAKM,KAAK6F,cACN,MAAM,IAAIzC,MAAM,qDAGhBlB,EAASlC,KAAK6F,cAAcI,eAAevG,MAGvCM,KAAK6F,cAAgB,KACrB,wCAAW,UAAW3D,O,mCAczB4B,GACT,IAAIrG,EAAI,EAEF8B,EAAI,CACNmD,KAAMwD,OAAOpC,EAAIqC,OAAO,KAE5B,QAA2BC,IAAvB5B,EAAWjF,EAAEmD,MACb,MAAM,IAAIU,MAAM,uBAAyB7D,EAAEmD,MAG/C,GAAInD,EAAEmD,OAAS8B,EAAWO,cACtBxF,EAAEmD,OAAS8B,EAAWQ,WAAY,CAElC,IADA,IAAMqB,EAAQ5I,EAAI,EACS,MAApBqG,EAAIqC,SAAS1I,IAAcA,GAAKqG,EAAIpD,SAC3C,IAAM4F,EAAMxC,EAAIyC,UAAUF,EAAO5I,GACjC,GAAI6I,GAAOJ,OAAOI,IAA0B,MAAlBxC,EAAIqC,OAAO1I,GACjC,MAAM,IAAI2F,MAAM,uBAEpB7D,EAAE2F,YAAcgB,OAAOI,GAG3B,GAAI,MAAQxC,EAAIqC,OAAO1I,EAAI,GAAI,CAE3B,IADA,IAAM4I,EAAQ5I,EAAI,IACTA,GAAG,CAER,GAAI,MADMqG,EAAIqC,OAAO1I,GAEjB,MACJ,GAAIA,IAAMqG,EAAIpD,OACV,MAERnB,EAAE4F,IAAMrB,EAAIyC,UAAUF,EAAO5I,QAG7B8B,EAAE4F,IAAM,IAGZ,IAAMqB,EAAO1C,EAAIqC,OAAO1I,EAAI,GAC5B,GAAI,KAAO+I,GAAQN,OAAOM,IAASA,EAAM,CAErC,IADA,IAAMH,EAAQ5I,EAAI,IACTA,GAAG,CACR,IAAMK,EAAIgG,EAAIqC,OAAO1I,GACrB,GAAI,MAAQK,GAAKoI,OAAOpI,IAAMA,EAAG,GAC3BL,EACF,MAEJ,GAAIA,IAAMqG,EAAIpD,OACV,MAERnB,EAAE6F,GAAKc,OAAOpC,EAAIyC,UAAUF,EAAO5I,EAAI,IAG3C,GAAIqG,EAAIqC,SAAS1I,GAAI,CACjB,IAAMgJ,EAsClB,SAAkB3C,GACd,IACI,OAAOuB,KAAKqB,MAAM5C,GAEtB,MAAO6C,GACH,OAAO,GA3CaC,CAAS9C,EAAI+C,OAAOpJ,IACpC,IAAI6G,EAAQwC,eAAevH,EAAEmD,KAAM+D,GAI/B,MAAM,IAAIrD,MAAM,mBAHhB7D,EAAEoE,KAAO8C,EAQjB,OAAOlH,I,gCAsBHS,KAAK6F,eACL7F,KAAK6F,cAAckB,4B,sCArBLrE,EAAM+D,GACxB,OAAQ/D,GACJ,KAAK8B,EAAWwC,QACZ,MAA0B,WAAnB,EAAOP,GAClB,KAAKjC,EAAWyC,WACZ,YAAmBb,IAAZK,EACX,KAAKjC,EAAW0C,cACZ,MAA0B,iBAAZT,GAA2C,WAAnB,EAAOA,GACjD,KAAKjC,EAAWG,MAChB,KAAKH,EAAWO,aACZ,OAAO/D,MAAMmG,QAAQV,IAAkC,iBAAfA,EAAQ,GACpD,KAAKjC,EAAWI,IAChB,KAAKJ,EAAWQ,WACZ,OAAOhE,MAAMmG,QAAQV,Q,GAhIfhH,GA4ItB3C,EAAQwH,QAAUA,E,IAiBZwB,E,WACF,WAAY5D,GAAQ,UAChBlC,KAAKkC,OAASA,EACdlC,KAAK0F,QAAU,GACf1F,KAAKoH,UAAYlF,E,iDAUNmF,GAEX,GADArH,KAAK0F,QAAQxF,KAAKmH,GACdrH,KAAK0F,QAAQhF,SAAWV,KAAKoH,UAAUlC,YAAa,CAEpD,IAAMhD,EAASuC,EAAS6C,kBAAkBtH,KAAKoH,UAAWpH,KAAK0F,SAE/D,OADA1F,KAAK+G,yBACE7E,EAEX,OAAO,O,+CAMPlC,KAAKoH,UAAY,KACjBpH,KAAK0F,QAAU,O,oBClRvB,IAAI6B,EAAK,0OAELC,EAAQ,CACR,SAAU,WAAY,YAAa,WAAY,OAAQ,WAAY,OAAQ,OAAQ,WAAY,OAAQ,YAAa,OAAQ,QAAS,UAGzIzK,EAAOD,QAAU,SAAkBgH,GAC/B,IAAI2D,EAAM3D,EACN4D,EAAI5D,EAAI6D,QAAQ,KAChBhB,EAAI7C,EAAI6D,QAAQ,MAEV,GAAND,IAAiB,GAANf,IACX7C,EAAMA,EAAIyC,UAAU,EAAGmB,GAAK5D,EAAIyC,UAAUmB,EAAGf,GAAGiB,QAAQ,KAAM,KAAO9D,EAAIyC,UAAUI,EAAG7C,EAAIpD,SAO9F,IAJA,IAmCmBoC,EACfa,EApCA9F,EAAI0J,EAAGM,KAAK/D,GAAO,IACnBgE,EAAM,GACNrK,EAAI,GAEDA,KACHqK,EAAIN,EAAM/J,IAAMI,EAAEJ,IAAM,GAa5B,OAVU,GAANiK,IAAiB,GAANf,IACXmB,EAAIC,OAASN,EACbK,EAAIE,KAAOF,EAAIE,KAAKzB,UAAU,EAAGuB,EAAIE,KAAKtH,OAAS,GAAGkH,QAAQ,KAAM,KACpEE,EAAIG,UAAYH,EAAIG,UAAUL,QAAQ,IAAK,IAAIA,QAAQ,IAAK,IAAIA,QAAQ,KAAM,KAC9EE,EAAII,SAAU,GAGlBJ,EAAIK,UAMR,SAAmBzI,EAAK0I,GACpB,IACIC,EAAQD,EAAKR,QADN,WACoB,KAAKpF,MAAM,KAEjB,KAArB4F,EAAKvB,OAAO,EAAG,IAA6B,IAAhBuB,EAAK1H,QACjC2H,EAAMxH,OAAO,EAAG,GAEmB,KAAnCuH,EAAKvB,OAAOuB,EAAK1H,OAAS,EAAG,IAC7B2H,EAAMxH,OAAOwH,EAAM3H,OAAS,EAAG,GAGnC,OAAO2H,EAjBSF,CAAUL,EAAKA,EAAG,MAClCA,EAAIQ,
UAmBexF,EAnBUgF,EAAG,MAoB5BnE,EAAO,GAEXb,EAAM8E,QAAQ,6BAA6B,SAAUW,EAAIC,EAAIC,GACrDD,IACA7E,EAAK6E,GAAMC,MAIZ9E,GA1BAmE,I,6gDCvCX3J,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,IACtD5B,EAAQ4L,aAAU,EAClB,IAAMC,EAAMrH,EAAQ,IACdsH,EAAWtH,EAAQ,IACnB7B,EAAU6B,EAAQ,GAClBqB,EAASrB,EAAQ,GACjBuH,EAAOvH,EAAQ,IACfrC,EAAOqC,EAAQ,IACfwH,EAAUxH,EAAQ,IAGlBoH,E,sQACF,WAAYZ,EAAKjF,GAAM,O,4FAAA,UACnB,gBACKkG,KAAO,GACZ,EAAKC,KAAO,GACRlB,GAAO,WAAa,EAAOA,KAC3BjF,EAAOiF,EACPA,OAAM1B,IAEVvD,EAAOA,GAAQ,IACVuF,KAAOvF,EAAKuF,MAAQ,aACzB,EAAKvF,KAAOA,EACZ,EAAKoG,cAAmC,IAAtBpG,EAAKoG,cACvB,EAAKC,qBAAqBrG,EAAKqG,sBAAwBC,KACvD,EAAKC,kBAAkBvG,EAAKuG,mBAAqB,KACjD,EAAKC,qBAAqBxG,EAAKwG,sBAAwB,KACvD,EAAKC,oBAAoBzG,EAAKyG,qBAAuB,IACrD,EAAKC,QAAU,IAAIT,EAAQ,CACvBU,IAAK,EAAKJ,oBACVK,IAAK,EAAKJ,uBACVK,OAAQ,EAAKJ,wBAEjB,EAAKK,QAAQ,MAAQ9G,EAAK8G,QAAU,IAAQ9G,EAAK8G,SACjD,EAAKC,YAAc,SACnB,EAAK9B,IAAMA,EACX,IAAM+B,EAAUhH,EAAKF,QAAUA,EAxBZ,OAyBnB,EAAKmH,QAAU,IAAID,EAAQtF,QAC3B,EAAKwF,QAAU,IAAIF,EAAQvF,QAC3B,EAAK0F,cAAoC,IAArBnH,EAAKoH,YACrB,EAAKD,cACL,EAAKE,OA7BU,E,kDA+BVC,GACT,OAAK7J,UAAUI,QAEfV,KAAKoK,gBAAkBD,EAChBnK,MAFIA,KAAKoK,gB,2CAICD,GACjB,YAAU/D,IAAN+D,EACOnK,KAAKqK,uBAChBrK,KAAKqK,sBAAwBF,EACtBnK,Q,wCAEOmK,GACd,IAAIG,EACJ,YAAUlE,IAAN+D,EACOnK,KAAKuK,oBAChBvK,KAAKuK,mBAAqBJ,EACF,QAAvBG,EAAKtK,KAAKuJ,eAA4B,IAAPe,GAAyBA,EAAGE,OAAOL,GAC5DnK,Q,0CAESmK,GAChB,IAAIG,EACJ,YAAUlE,IAAN+D,EACOnK,KAAKyK,sBAChBzK,KAAKyK,qBAAuBN,EACJ,QAAvBG,EAAKtK,KAAKuJ,eAA4B,IAAPe,GAAyBA,EAAGI,UAAUP,GAC/DnK,Q,2CAEUmK,GACjB,IAAIG,EACJ,YAAUlE,IAAN+D,EACOnK,KAAK2K,uBAChB3K,KAAK2K,sBAAwBR,EACL,QAAvBG,EAAKtK,KAAKuJ,eAA4B,IAAPe,GAAyBA,EAAGM,OAAOT,GAC5DnK,Q,8BAEHmK,GACJ,OAAK7J,UAAUI,QAEfV,KAAK6K,SAAWV,EACTnK,MAFIA,KAAK6K,W,8CAYX7K,KAAK8K,eACN9K,KAAKoK,eACqB,IAA1BpK,KAAKuJ,QAAQwB,UAEb/K,KAAKgL,c,2BAURjL,GAAI,WAGL,IAAKC,KAAK4J,YAAYjC,QAAQ,QAC1B,OAAO3H,KAGXA,KAAKiL,OAAStC,EAAI3I,KAAK8H,IAAK9H,KAAK6C,MACjC,IAAMG,EAAShD,KAAKiL,OACd/N,EAAO8C,KACbA,KAAK4J,YAAc,UACnB5J,KAAKkL,eAAgB,EAErB,IAAMC,EAAUtC,EAAKjJ,GAAGoD,EAAQ,QAAQ,WACpC9F,EAAKkO,SACLrL,GAAMA,OAGJsL,EAAWxC,EAAKjJ,GAAGoD,EAAQ,SAAS,SAACG,GAGvCjG,EAAKoO,UACLpO,EAAK0M,YAAc,SACnB,kCAAW,QAASzG,GAChBpD,EACAA,EAAGoD,GAIHjG,EAAKqO,0BAGb,IAAI,IAAUvL,KAAK6K,SAAU,CACzB,IAAMlB,EAAU3J,KAAK6K,SAGL,IAAZlB,GACAwB,EAAQK,UAGZ,IAAMC,EAAQC,YAAW,WAGrBP,EAAQK,UACRxI,EAAO2I,QACP3I,EAAOlC,KAAK,QAAS,IAAIsC,MAAM,cAChCuG,GACH3J,KAAKgJ,KAAK9I,KAAK,CACXsL,QAAS,WACLI,aAAaH,MAMzB,OAFAzL,KAAKgJ,KAAK9I,KAAKiL,GACfnL,KAAKgJ,KAAK9I,KAAKmL,GACRrL,O,8BAQHD,GACJ,OAAOC,KAAKkK,KAAKnK,K,+BAWjBC,KAAKsL,UAELtL,KAAK4J,YAAc,OACnB,wCAAW,QAEX,IAAM5G,EAAShD,KAAKiL,OACpBjL,KAAKgJ,KAAK9I,KAAK2I,EAAKjJ,GAAGoD,EAAQ,OAAQ/D,EAAKe,KAAM,WAAY6I,EAAKjJ,GAAGoD,EAAQ,OAAQ/D,EAAKe,KAAM,WAAY6I,EAAKjJ,GAAGoD,EAAQ,QAAS/D,EAAKe,KAAM,YAAa6I,EAAKjJ,GAAGoD,EAAQ,QAAS/D,EAAKe,KAAM,YAAa6I,EAAKjJ,GAAGI,KAAK+J,QAAS,UAAW9K,EAAKe,KAAM,iB,+BAQ3P,wCAAW,U,6BAOR2D,GACH3D,KAAK+J,QAAQ8B,IAAIlI,K,gCAOXzB,GACN,wCAAW,SAAUA,K,8BAOjBiB,GAGJ,wCAAW,QAASA,K,6BAQjBgC,EAAKtC,GACR,IAAIG,EAAShD,KAAK+I,KAAK5D,GAKvB,OAJKnC,IACDA,EAAS,IAAI4F,EAASkD,OAAO9L,KAAMmF,EAAKtC,GACxC7C,KAAK+I,KAAK5D,GAAOnC,GAEdA,I,+BAQFA,GAEL,IADA,IACA,MADa7E,OAAO4N,KAAK/L,KAAK+I,MAC9B,eAAwB,CAAnB,IAAM5D,EAAG,KAEV,GADenF,KAAK+I,KAAK5D,GACd6G,OAGP,OAGRhM,KAAKiM,W,8BAQD/J,GAGAA,EAAOY,OAAyB,IAAhBZ,EAAOQ,OACvBR,EAAOiD,KAAO,IAAMjD,EAAOY,OAE/B,IADA,IAAMf,EAAiB/B,KAAK8J,QAAQjG,OAAO3B,GAClCzE,EAAI,EAAGA,EAAIsE,EAAerB,OAAQjD,IACvCuC,KAAKiL,OAAOxH,MAAM1B,EAAetE,GAAIyE,EAAOgK,W,gCAYhD,IADA,IAAMC,EAAanM,KAAKgJ,KAAKtI,OACpBjD,EAAI,EAAGA,EAAI0O,EAAY1O,IAChBuC,KAAKgJ,KAAKoD,QAClBZ,UAERxL,KAAK+J,QAAQyB,Y,+BAUbxL,KAAKkL,eAAgB,EACrBlL,KAAK8K,eAAgB,EACjB,YAAc9
K,KAAK4J,aAGnB5J,KAAKsL,UAETtL,KAAKuJ,QAAQ8C,QACbrM,KAAK4J,YAAc,SACf5J,KAAKiL,QACLjL,KAAKiL,OAAOU,U,mCAQhB,OAAO3L,KAAKiM,W,8BAORK,GAGJtM,KAAKsL,UACLtL,KAAKuJ,QAAQ8C,QACbrM,KAAK4J,YAAc,SACnB,wCAAW,QAAS0C,GAChBtM,KAAKoK,gBAAkBpK,KAAKkL,eAC5BlL,KAAKgL,c,kCAQD,WACR,GAAIhL,KAAK8K,eAAiB9K,KAAKkL,cAC3B,OAAOlL,KACX,IAAM9C,EAAO8C,KACb,GAAIA,KAAKuJ,QAAQwB,UAAY/K,KAAKqK,sBAG9BrK,KAAKuJ,QAAQ8C,QACb,wCAAW,oBACXrM,KAAK8K,eAAgB,MAEpB,CACD,IAAMyB,EAAQvM,KAAKuJ,QAAQiD,WAG3BxM,KAAK8K,eAAgB,EACrB,IAAMW,EAAQC,YAAW,WACjBxO,EAAKgO,gBAIT,kCAAW,oBAAqBhO,EAAKqM,QAAQwB,UAEzC7N,EAAKgO,eAEThO,EAAKgN,MAAK,SAAC/G,GACHA,GAGAjG,EAAK4N,eAAgB,EACrB5N,EAAK8N,YACL,kCAAW,kBAAmB7H,IAK9BjG,EAAKuP,oBAGdF,GACHvM,KAAKgJ,KAAK9I,KAAK,CACXsL,QAAS,WACLI,aAAaH,S,oCAWzB,IAAMiB,EAAU1M,KAAKuJ,QAAQwB,SAC7B/K,KAAK8K,eAAgB,EACrB9K,KAAKuJ,QAAQ8C,QACb,wCAAW,YAAaK,Q,8BA7XVjN,GAgYtB3C,EAAQ4L,QAAUA,G,gBC5YlB,IAAMiE,EAAiBrL,EAAQ,GACzBsL,EAAMtL,EAAQ,IACduL,EAAQvL,EAAQ,IAChBwL,EAAYxL,EAAQ,IAE1BxE,EAAQiQ,QAUR,SAAiBlK,GACf,IACImK,GAAK,EACLC,GAAK,EACHC,GAAQ,IAAUrK,EAAKqK,MAE7B,GAAwB,oBAAbC,SAA0B,CACnC,IAAMC,EAAQ,WAAaD,SAASxL,SAChC0L,EAAOF,SAASE,KAGfA,IACHA,EAAOD,EAAQ,IAAM,IAGvBJ,EAAKnK,EAAKyK,WAAaH,SAASG,UAAYD,IAASxK,EAAKwK,KAC1DJ,EAAKpK,EAAK0K,SAAWH,EAOvB,GAJAvK,EAAK2K,QAAUR,EACfnK,EAAK4K,QAAUR,EAGX,SAFE,IAAIN,EAAe9J,KAEHA,EAAK6K,WACzB,OAAO,IAAId,EAAI/J,GAEf,IAAKqK,EAAO,MAAM,IAAI9J,MAAM,kBAC5B,OAAO,IAAIyJ,EAAMhK,IApCrB/F,EAAQgQ,UAAYA,G,gBCJpB,IAAMa,EAAUrM,EAAQ,IAClBsM,EAAatM,EAAQ,GAE3BvE,EAAOD,QAAU,SAAS+F,GACxB,IAAM2K,EAAU3K,EAAK2K,QAIfC,EAAU5K,EAAK4K,QAIfI,EAAahL,EAAKgL,WAGxB,IACE,GAAI,oBAAuBlB,kBAAoBa,GAAWG,GACxD,OAAO,IAAIhB,eAEb,MAAOhG,IAKT,IACE,GAAI,oBAAuBmH,iBAAmBL,GAAWI,EACvD,OAAO,IAAIC,eAEb,MAAOnH,IAET,IAAK6G,EACH,IACE,OAAO,IAAII,EAAW,CAAC,UAAUG,OAAO,UAAU3L,KAAK,OACrD,qBAEF,MAAOuE,O,uzCCrCb,IAAM/D,EAAYtB,EAAQ,GACpB0M,EAAU1M,EAAQ,GAClBqB,EAASrB,EAAQ,GACjB2M,EAAQ3M,EAAQ,IAKhB4M,E,0WAeFlO,KAAKmO,S,4BASDC,GACJ,IAAMlR,EAAO8C,KAIb,SAASqO,IAGPnR,EAAK6F,WAAa,SAClBqL,IAGF,GATApO,KAAK+C,WAAa,UASd/C,KAAK+M,UAAY/M,KAAK0D,SAAU,CAClC,IAAI4K,EAAQ,EAERtO,KAAK+M,UAGPuB,IACAtO,KAAKG,KAAK,gBAAgB,aAGtBmO,GAASD,QAIVrO,KAAK0D,WAGR4K,IACAtO,KAAKG,KAAK,SAAS,aAGfmO,GAASD,aAIfA,M,6BAYFrO,KAAK+M,SAAU,EACf/M,KAAKuO,SACLvO,KAAKc,KAAK,U,6BAQL6C,GACL,IAAMzG,EAAO8C,KAoBb2C,EAAON,cAAcsB,EAAM3D,KAAKgD,OAAOT,YAAYN,SAjBlC,SAASC,EAAQsM,EAAOF,GAOvC,GALI,YAAcpR,EAAK6F,YAA8B,SAAhBb,EAAOQ,MAC1CxF,EAAKuR,SAIH,UAAYvM,EAAOQ,KAErB,OADAxF,EAAKsG,WACE,EAITtG,EAAK0G,SAAS1B,MAOZ,WAAalC,KAAK+C,aAEpB/C,KAAK+M,SAAU,EACf/M,KAAKc,KAAK,gBAEN,SAAWd,KAAK+C,YAClB/C,KAAKmO,U,gCAcT,IAAMjR,EAAO8C,KAEb,SAAS2L,IAGPzO,EAAKuG,MAAM,CAAC,CAAEf,KAAM,WAGlB,SAAW1C,KAAK+C,WAGlB4I,IAMA3L,KAAKG,KAAK,OAAQwL,K,4BAWhB9J,GAAS,WACb7B,KAAK0D,UAAW,EAEhBf,EAAOf,cAAcC,GAAS,SAAA8B,GAC5B,EAAK+K,QAAQ/K,GAAM,WACjB,EAAKD,UAAW,EAChB,EAAK5C,KAAK,iB,4BAWd,IAAIgC,EAAQ9C,KAAK8C,OAAS,GACpB6L,EAAS3O,KAAK6C,KAAK0K,OAAS,QAAU,OACxCF,EAAO,GA4BX,OAzBI,IAAUrN,KAAK6C,KAAK+L,oBACtB9L,EAAM9C,KAAK6C,KAAKgM,gBAAkBZ,KAG/BjO,KAAK8O,gBAAmBhM,EAAMiM,MACjCjM,EAAMkM,IAAM,GAGdlM,EAAQkL,EAAQnK,OAAOf,GAIrB9C,KAAK6C,KAAKwK,OACR,UAAYsB,GAAqC,MAA3BzI,OAAOlG,KAAK6C,KAAKwK,OACtC,SAAWsB,GAAqC,KAA3BzI,OAAOlG,KAAK6C,KAAKwK,SAEzCA,EAAO,IAAMrN,KAAK6C,KAAKwK,MAIrBvK,EAAMpC,SACRoC,EAAQ,IAAMA,GAKd6L,EACA,QAHgD,IAArC3O,KAAK6C,KAAKyK,SAAS3F,QAAQ,KAI9B,IAAM3H,KAAK6C,KAAKyK,SAAW,IAAMtN,KAAK6C,KAAKyK,UACnDD,EACArN,KAAK6C,KAAKuF,KACVtF,I,2BA3MF,MAAO,e,8BALWF,GAqNtB7F,EAAOD,QAAUoR,G,cC7NjB,IAAMe,EAAe9Q,OAAOY,OAAO,MACnCkQ,EAAY,KAAW,IACvBA,EAAY,MAAY,IACxBA,EAAY,KAAW,IACvBA,EAAY,KAAW,IACvBA,EAAY,QAAc,IAC1BA,EAAY,QAAc,IAC1BA,EAAY,KAAW,IAEvB,IAAMC,EAAuB/Q,OAAOY,OAAO,MAC3CZ,OAAO4N,K
AAKkD,GAAchN,SAAQ,SAAAjD,GAChCkQ,EAAqBD,EAAajQ,IAAQA,KAK5CjC,EAAOD,QAAU,CACfmS,eACAC,uBACAC,aALmB,CAAEzM,KAAM,QAASiB,KAAM,kB,6BCZ5C,IAKIyL,EALAC,EAAW,mEAAmE7M,MAAM,IAEpF8M,EAAM,GACNC,EAAO,EACP9R,EAAI,EAUR,SAASoG,EAAO2L,GACd,IAAIC,EAAU,GAEd,GACEA,EAAUJ,EAASG,EAjBV,IAiB0BC,EACnCD,EAAME,KAAKC,MAAMH,EAlBR,UAmBFA,EAAM,GAEf,OAAOC,EA0BT,SAASxB,IACP,IAAI2B,EAAM/L,GAAQ,IAAIgM,MAEtB,OAAID,IAAQR,GAAaG,EAAO,EAAGH,EAAOQ,GACnCA,EAAK,IAAK/L,EAAO0L,KAM1B,KAAO9R,EAzDM,GAyDMA,IAAK6R,EAAID,EAAS5R,IAAMA,EAK3CwQ,EAAMpK,OAASA,EACfoK,EAAMjK,OAhCN,SAAgBF,GACd,IAAIgM,EAAU,EAEd,IAAKrS,EAAI,EAAGA,EAAIqG,EAAIpD,OAAQjD,IAC1BqS,EAnCS,GAmCCA,EAAmBR,EAAIxL,EAAIqC,OAAO1I,IAG9C,OAAOqS,GA0BT/S,EAAOD,QAAUmR,G,cCnEjBlR,EAAOD,QAAQiT,KAAO,SAACrQ,GAAiB,2BAATsQ,EAAS,iCAATA,EAAS,kBACtC,OAAOA,EAAKC,QAAO,SAACC,EAAKC,GAEvB,OADAD,EAAIC,GAAKzQ,EAAIyQ,GACND,IACN,M,8hFCHL/R,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,IACtD5B,EAAQgP,YAAS,EACjB,IAAMsE,EAAqB9O,EAAQ,GAC7B7B,EAAU6B,EAAQ,GAClBuH,EAAOvH,EAAQ,IACfrC,EAAOqC,EAAQ,IAOf+O,EAAkBlS,OAAOmS,OAAO,CAClCC,QAAS,EACTC,cAAe,EACfC,WAAY,EACZC,cAAe,EAEfC,YAAa,EACbpQ,eAAgB,IAEduL,E,sQAMF,WAAY8E,EAAIzL,EAAKtC,GAAM,a,4FAAA,UACvB,gBACKgO,IAAM,EACX,EAAKC,KAAO,GACZ,EAAKC,cAAgB,GACrB,EAAKC,WAAa,GAClB,EAAKC,MAAQ,GACb,EAAKL,GAAKA,EACV,EAAKzL,IAAMA,EACX,EAAK0L,IAAM,EACX,EAAKC,KAAO,GACZ,EAAKC,cAAgB,GACrB,EAAKC,WAAa,GAClB,EAAKE,WAAY,EACjB,EAAKC,cAAe,EACpB,EAAKF,MAAQ,GACTpO,GAAQA,EAAKuO,OACb,EAAKA,KAAOvO,EAAKuO,MAEjB,EAAKR,GAAG5G,cACR,EAAKE,OApBc,E,iDA4BvB,IAAIlK,KAAKgJ,KAAT,CAEA,IAAM4H,EAAK5Q,KAAK4Q,GAChB5Q,KAAKgJ,KAAO,CACRH,EAAKjJ,GAAGgR,EAAI,OAAQ3R,EAAKe,KAAM,WAC/B6I,EAAKjJ,GAAGgR,EAAI,SAAU3R,EAAKe,KAAM,aACjC6I,EAAKjJ,GAAGgR,EAAI,QAAS3R,EAAKe,KAAM,gB,gCAepC,OAAIA,KAAKkR,YAETlR,KAAKqR,YACArR,KAAK4Q,GAAL,eACD5Q,KAAK4Q,GAAG1G,OACR,SAAWlK,KAAK4Q,GAAGhH,aACnB5J,KAAKoL,UALEpL,O,6BAYX,OAAOA,KAAKuQ,Y,6BAQF,2BAANxP,EAAM,yBAANA,EAAM,gBAGV,OAFAA,EAAK4E,QAAQ,WACb3F,KAAKc,KAAKT,MAAML,KAAMe,GACff,O,2BAUNsR,GACD,GAAIjB,EAAgB/Q,eAAegS,GAC/B,MAAM,IAAIlO,MAAM,IAAMkO,EAAK,8BAFjB,2BAANvQ,EAAM,iCAANA,EAAM,kBAIdA,EAAK4E,QAAQ2L,GACb,IAAMpP,EAAS,CACXQ,KAAM0N,EAAmB5L,WAAWG,MACpChB,KAAM5C,EAEVmB,QAAiB,IACjBA,EAAOgK,QAAQqF,UAAmC,IAAxBvR,KAAKiR,MAAMM,SAEjC,mBAAsBxQ,EAAKA,EAAKL,OAAS,KAGzCV,KAAK8Q,KAAK9Q,KAAK6Q,KAAO9P,EAAKyQ,MAC3BtP,EAAOkD,GAAKpF,KAAK6Q,OAErB,IAAMY,EAAsBzR,KAAK4Q,GAAG3F,QAChCjL,KAAK4Q,GAAG3F,OAAOyG,WACf1R,KAAK4Q,GAAG3F,OAAOyG,UAAUhO,SACvBiO,EAAgB3R,KAAKiR,MAAL,YAAyBQ,IAAwBzR,KAAKkR,WAY5E,OAXIS,IAIK3R,KAAKkR,UACVlR,KAAKkC,OAAOA,GAGZlC,KAAKgR,WAAW9Q,KAAKgC,IAEzBlC,KAAKiR,MAAQ,GACNjR,O,6BAQJkC,GACHA,EAAOiD,IAAMnF,KAAKmF,IAClBnF,KAAK4Q,GAAGgB,QAAQ1P,K,+BAOX,WAGmB,mBAAblC,KAAKoR,KACZpR,KAAKoR,MAAK,SAACzN,GACP,EAAKzB,OAAO,CAAEQ,KAAM0N,EAAmB5L,WAAWwC,QAASrD,YAI/D3D,KAAKkC,OAAO,CAAEQ,KAAM0N,EAAmB5L,WAAWwC,QAASrD,KAAM3D,KAAKoR,S,8BAStE9E,GAGJtM,KAAKkR,WAAY,EACjBlR,KAAKmR,cAAe,SACbnR,KAAKoF,GACZ,wCAAW,aAAckH,K,+BAQpBpK,GAEL,GADsBA,EAAOiD,MAAQnF,KAAKmF,IAG1C,OAAQjD,EAAOQ,MACX,KAAK0N,EAAmB5L,WAAWwC,QAC/B,GAAI9E,EAAOyB,MAAQzB,EAAOyB,KAAKoL,IAAK,CAChC,IAAM3J,EAAKlD,EAAOyB,KAAKoL,IACvB/O,KAAK6R,UAAUzM,QAGf,wCAAW,gBAAiB,IAAIhC,MAAM,8LAE1C,MACJ,KAAKgN,EAAmB5L,WAAWG,MAGnC,KAAKyL,EAAmB5L,WAAWO,aAC/B/E,KAAK8R,QAAQ5P,GACb,MACJ,KAAKkO,EAAmB5L,WAAWI,IAGnC,KAAKwL,EAAmB5L,WAAWQ,WAC/BhF,KAAK+R,MAAM7P,GACX,MACJ,KAAKkO,EAAmB5L,WAAWyC,WAC/BjH,KAAKgS,eACL,MACJ,KAAK5B,EAAmB5L,WAAW0C,cAC/B,IAAM/D,EAAM,IAAIC,MAAMlB,EAAOyB,KAAKsO,SAElC9O,EAAIQ,KAAOzB,EAAOyB,KAAKA,KACvB,wCAAW,gBAAiBR,M,8BAUhCjB,GACJ,IAAMnB,EAAOmB,EAAOyB,MAAQ,GAGxB,MAAQzB,EAAOkD,IAGfrE,EAAKb,KAAKF,KAAKkS,IAAIhQ,EAAOkD,KAE1BpF,KAAKkR,UACLlR,KAAKmS,UAAUpR,GAGff,KAAK+Q,cAAc7Q,KAAK/B,OAAOmS,OAAOvP,M,
gCAGpCA,GACN,GAAIf,KAAKoS,eAAiBpS,KAAKoS,cAAc1R,OAAQ,CACjD,IADiD,MAC/BV,KAAKoS,cAAclR,SADY,IAEjD,2BAAkC,QACrBb,MAAML,KAAMe,GAHwB,+BAMrD,8BAAWV,MAAML,KAAMe,K,0BAOvBqE,GACA,IAAMlI,EAAO8C,KACTqS,GAAO,EACX,OAAO,WAEH,IAAIA,EAAJ,CAEAA,GAAO,EAJe,2BAANtR,EAAM,yBAANA,EAAM,gBAOtB7D,EAAKgF,OAAO,CACRQ,KAAM0N,EAAmB5L,WAAWI,IACpCQ,GAAIA,EACJzB,KAAM5C,Q,4BAUZmB,GACF,IAAMgQ,EAAMlS,KAAK8Q,KAAK5O,EAAOkD,IACzB,mBAAsB8M,IAGtBA,EAAI7R,MAAML,KAAMkC,EAAOyB,aAChB3D,KAAK8Q,KAAK5O,EAAOkD,O,gCAYtBA,GAGNpF,KAAKoF,GAAKA,EACVpF,KAAKkR,WAAY,EACjBlR,KAAKmR,cAAe,EACpB,wCAAW,WACXnR,KAAKsS,iB,qCAOM,WACXtS,KAAK+Q,cAAc9O,SAAQ,SAAClB,GAAD,OAAU,EAAKoR,UAAUpR,MACpDf,KAAK+Q,cAAgB,GACrB/Q,KAAKgR,WAAW/O,SAAQ,SAACC,GAAD,OAAY,EAAKA,OAAOA,MAChDlC,KAAKgR,WAAa,K,qCAUlBhR,KAAKwL,UACLxL,KAAKuS,QAAQ,0B,gCAUb,GAAIvS,KAAKgJ,KAAM,CAEX,IAAK,IAAIvL,EAAI,EAAGA,EAAIuC,KAAKgJ,KAAKtI,OAAQjD,IAClCuC,KAAKgJ,KAAKvL,GAAG+N,UAEjBxL,KAAKgJ,KAAO,KAEhBhJ,KAAK4Q,GAAL,SAAoB5Q,Q,mCAoBpB,OAXIA,KAAKkR,WAGLlR,KAAKkC,OAAO,CAAEQ,KAAM0N,EAAmB5L,WAAWyC,aAGtDjH,KAAKwL,UACDxL,KAAKkR,WAELlR,KAAKuS,QAAQ,wBAEVvS,O,8BASP,OAAOA,KAAKyQ,e,+BASPc,GAEL,OADAvR,KAAKiR,MAAMM,SAAWA,EACfvR,O,4BAoBLwS,GAGF,OAFAxS,KAAKoS,cAAgBpS,KAAKoS,eAAiB,GAC3CpS,KAAKoS,cAAclS,KAAKsS,GACjBxS,O,iCASAwS,GAGP,OAFAxS,KAAKoS,cAAgBpS,KAAKoS,eAAiB,GAC3CpS,KAAKoS,cAAczM,QAAQ6M,GACpBxS,O,6BAQJwS,GACH,IAAKxS,KAAKoS,cACN,OAAOpS,KAEX,GAAIwS,GAEA,IADA,IAAMrR,EAAYnB,KAAKoS,cACd3U,EAAI,EAAGA,EAAI0D,EAAUT,OAAQjD,IAClC,GAAI+U,IAAarR,EAAU1D,GAEvB,OADA0D,EAAUN,OAAOpD,EAAG,GACbuC,UAKfA,KAAKoS,cAAgB,GAEzB,OAAOpS,O,qCASP,OAAOA,KAAKoS,eAAiB,K,6BAxY7B,QAASpS,KAAKgJ,O,+BA+Ud,OADAhJ,KAAKiR,MAAL,UAAsB,EACfjR,U,8BA9XMP,GA0brB3C,EAAQgP,OAASA,G,kQC/cjB3N,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,IACtD5B,EAAQ+H,UAAY/H,EAAQiJ,cAAW,EACvC,IAAM0M,EAA+C,mBAAhBC,YAM/BC,EAAWxU,OAAOkB,UAAUsT,SAC5BC,EAAiC,mBAATC,MACT,oBAATA,MACoB,6BAAxBF,EAAS/U,KAAKiV,MAChBC,EAAiC,mBAATC,MACT,oBAATA,MACoB,6BAAxBJ,EAAS/U,KAAKmV,MAMtB,SAAShN,EAASrG,GACd,OAAS+S,IAA0B/S,aAAegT,aAlBvC,SAAChT,GACZ,MAAqC,mBAAvBgT,YAAYM,OACpBN,YAAYM,OAAOtT,GACnBA,EAAIuT,kBAAkBP,YAeqCM,CAAOtT,KACnEkT,GAAkBlT,aAAemT,MACjCC,GAAkBpT,aAAeqT,KAE1CjW,EAAQiJ,SAAWA,EA4BnBjJ,EAAQ+H,UA3BR,SAASA,EAAUnF,EAAKwT,GACpB,IAAKxT,GAAsB,WAAf,EAAOA,GACf,OAAO,EAEX,GAAIsB,MAAMmG,QAAQzH,GAAM,CACpB,IAAK,IAAIjC,EAAI,EAAGC,EAAIgC,EAAIgB,OAAQjD,EAAIC,EAAGD,IACnC,GAAIoH,EAAUnF,EAAIjC,IACd,OAAO,EAGf,OAAO,EAEX,GAAIsI,EAASrG,GACT,OAAO,EAEX,GAAIA,EAAIwT,QACkB,mBAAfxT,EAAIwT,QACU,IAArB5S,UAAUI,OACV,OAAOmE,EAAUnF,EAAIwT,UAAU,GAEnC,IAAK,IAAMlU,KAAOU,EACd,GAAIvB,OAAOkB,UAAUC,eAAe1B,KAAK8B,EAAKV,IAAQ6F,EAAUnF,EAAIV,IAChE,OAAO,EAGf,OAAO,I,6BCnDXb,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,IACtD5B,EAAQ8C,QAAK,EASb9C,EAAQ8C,GARR,SAAYF,EAAK4R,EAAIvR,GAEjB,OADAL,EAAIE,GAAG0R,EAAIvR,GACJ,CACHyL,QAAS,WACL9L,EAAIU,IAAIkR,EAAIvR,O,cCHxB,IAAImB,EAAQ,GAAGA,MAWfnE,EAAOD,QAAU,SAAS4C,EAAKK,GAE7B,GADI,iBAAmBA,IAAIA,EAAKL,EAAIK,IAChC,mBAAqBA,EAAI,MAAM,IAAIqD,MAAM,8BAC7C,IAAIrC,EAAOG,EAAMtD,KAAK0C,UAAW,GACjC,OAAO,WACL,OAAOP,EAAGM,MAAMX,EAAKqB,EAAKgN,OAAO7M,EAAMtD,KAAK0C,gB,kQCnBhDnC,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,IACtD5B,EAAQgP,OAAShP,EAAQ8T,GAAK9T,EAAQ4L,QAAU5L,EAAQ6E,cAAW,EACnE,IAAMwR,EAAQ7R,EAAQ,IAChB8R,EAAY9R,EAAQ,GACpBsH,EAAWtH,EAAQ,IACzBnD,OAAOC,eAAetB,EAAS,SAAU,CAAEuB,YAAY,EAAMC,IAAK,WAAc,OAAOsK,EAASkD,UAMhG/O,EAAOD,QAAUA,EAAUuW,EAI3B,IAAMC,EAASxW,EAAQyW,SAAW,GAClC,SAASF,EAAOvL,EAAKjF,GACE,WAAf,EAAOiF,KACPjF,EAAOiF,EACPA,OAAM1B,GAEVvD,EAAOA,GAAQ,GACf,IASI+N,EATE4C,EAASL,EAAMM,IAAI3L,GACnBC,EAASyL,EAAOzL,OAChB3C,EAAKoO,EAAOpO,GACZgD,EAAOoL,EAAOpL,KACdsL,EAAgBJ,EAAMlO,IAAOgD,KAAQkL,EAAMlO,GAAN,KAsB3C,OArBsBvC,EAAK8Q,UACvB9Q,EAAK,0BACL,IAAU
A,EAAK+Q,WACfF,EAKA9C,EAAK,IAAIwC,EAAU1K,QAAQX,EAAQlF,IAG9ByQ,EAAMlO,KAGPkO,EAAMlO,GAAM,IAAIgO,EAAU1K,QAAQX,EAAQlF,IAE9C+N,EAAK0C,EAAMlO,IAEXoO,EAAO1Q,QAAUD,EAAKC,QACtBD,EAAKC,MAAQ0Q,EAAO1Q,OAEjB8N,EAAG5N,OAAOwQ,EAAOpL,KAAMvF,GAElC/F,EAAQ8T,GAAKyC,EAMb,IAAIjD,EAAqB9O,EAAQ,GACjCnD,OAAOC,eAAetB,EAAS,WAAY,CAAEuB,YAAY,EAAMC,IAAK,WAAc,OAAO8R,EAAmBzO,YAO5G7E,EAAQyT,QAAU8C,EAMlB,IAAIQ,EAAYvS,EAAQ,GACxBnD,OAAOC,eAAetB,EAAS,UAAW,CAAEuB,YAAY,EAAMC,IAAK,WAAc,OAAOuV,EAAUnL,Y,6BCvElGvK,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,IACtD5B,EAAQ2W,SAAM,EACd,IAAMK,EAAWxS,EAAQ,GAgEzBxE,EAAQ2W,IArDR,SAAa3L,EAAKiM,GACd,IAAIrU,EAAMoI,EAEViM,EAAMA,GAA4B,oBAAb5G,UAA4BA,SAC7C,MAAQrF,IACRA,EAAMiM,EAAIpS,SAAW,KAAOoS,EAAI/L,MAEjB,iBAARF,IACH,MAAQA,EAAI3B,OAAO,KAEf2B,EADA,MAAQA,EAAI3B,OAAO,GACb4N,EAAIpS,SAAWmG,EAGfiM,EAAI/L,KAAOF,GAGpB,sBAAsBkM,KAAKlM,KAIxBA,OADA,IAAuBiM,EACjBA,EAAIpS,SAAW,KAAOmG,EAGtB,WAAaA,GAM3BpI,EAAMoU,EAAShM,IAGdpI,EAAI2N,OACD,cAAc2G,KAAKtU,EAAIiC,UACvBjC,EAAI2N,KAAO,KAEN,eAAe2G,KAAKtU,EAAIiC,YAC7BjC,EAAI2N,KAAO,QAGnB3N,EAAI0I,KAAO1I,EAAI0I,MAAQ,IACvB,IACMJ,GADkC,IAA3BtI,EAAIsI,KAAKL,QAAQ,KACV,IAAMjI,EAAIsI,KAAO,IAAMtI,EAAIsI,KAS/C,OAPAtI,EAAI0F,GAAK1F,EAAIiC,SAAW,MAAQqG,EAAO,IAAMtI,EAAI2N,KAEjD3N,EAAIuU,KACAvU,EAAIiC,SACA,MACAqG,GACC+L,GAAOA,EAAI1G,OAAS3N,EAAI2N,KAAO,GAAK,IAAM3N,EAAI2N,MAChD3N,I,gBCjEX,IAAMoM,EAASxK,EAAQ,IAEvBvE,EAAOD,QAAU,SAACgL,EAAKjF,GAAN,OAAe,IAAIiJ,EAAOhE,EAAKjF,IAOhD9F,EAAOD,QAAQgP,OAASA,EACxB/O,EAAOD,QAAQ6E,SAAWmK,EAAOnK,SACjC5E,EAAOD,QAAQ8F,UAAYtB,EAAQ,GACnCvE,EAAOD,QAAQoX,WAAa5S,EAAQ,GACpCvE,EAAOD,QAAQ6F,OAASrB,EAAQ,I,sgDCbhC,IAAM4S,EAAa5S,EAAQ,GACrB7B,EAAU6B,EAAQ,GAGlBqB,EAASrB,EAAQ,GACjBwS,EAAWxS,EAAQ,GACnB0M,EAAU1M,EAAQ,GAElBwK,E,sQAQJ,WAAYhE,GAAgB,MAAXjF,EAAW,uDAAJ,GAAI,iBAC1B,eAEIiF,GAAO,WAAa,EAAOA,KAC7BjF,EAAOiF,EACPA,EAAM,MAGJA,GACFA,EAAMgM,EAAShM,GACfjF,EAAKyK,SAAWxF,EAAIE,KACpBnF,EAAK0K,OAA0B,UAAjBzF,EAAInG,UAAyC,QAAjBmG,EAAInG,SAC9CkB,EAAKwK,KAAOvF,EAAIuF,KACZvF,EAAIhF,QAAOD,EAAKC,MAAQgF,EAAIhF,QACvBD,EAAKmF,OACdnF,EAAKyK,SAAWwG,EAASjR,EAAKmF,MAAMA,MAGtC,EAAKuF,OACH,MAAQ1K,EAAK0K,OACT1K,EAAK0K,OACe,oBAAbJ,UAA4B,WAAaA,SAASxL,SAE3DkB,EAAKyK,WAAazK,EAAKwK,OAEzBxK,EAAKwK,KAAO,EAAKE,OAAS,MAAQ,MAGpC,EAAKD,SACHzK,EAAKyK,WACgB,oBAAbH,SAA2BA,SAASG,SAAW,aACzD,EAAKD,KACHxK,EAAKwK,OACgB,oBAAbF,UAA4BA,SAASE,KACzCF,SAASE,KACT,EAAKE,OACL,IACA,IAEN,EAAK2G,WAAarR,EAAKqR,YAAc,CAAC,UAAW,aACjD,EAAKnR,WAAa,GAClB,EAAKoR,YAAc,GACnB,EAAKC,cAAgB,EAErB,EAAKvR,KAAO,EACV,CACEuF,KAAM,aACNiM,OAAO,EACPC,iBAAiB,EACjBC,SAAS,EACTrH,OAAO,EACP2B,eAAgB,IAChB2F,iBAAiB,EACjBC,oBAAoB,EACpBC,kBAAmB,CACjBC,UAAW,MAEbC,iBAAkB,IAEpB/R,GAGF,EAAKA,KAAKuF,KAAO,EAAKvF,KAAKuF,KAAKR,QAAQ,MAAO,IAAM,IAEtB,iBAApB,EAAK/E,KAAKC,QACnB,EAAKD,KAAKC,MAAQkL,EAAQhK,OAAO,EAAKnB,KAAKC,QAI7C,EAAKsC,GAAK,KACV,EAAKyP,SAAW,KAChB,EAAKC,aAAe,KACpB,EAAKC,YAAc,KAGnB,EAAKC,iBAAmB,KAExB,EAAK9K,OA7EqB,E,qDAuFZlM,GAGd,IAAM8E,EA2jBV,SAAepD,GACb,IAAMxB,EAAI,GACV,IAAK,IAAIT,KAAKiC,EACRA,EAAIJ,eAAe7B,KACrBS,EAAET,GAAKiC,EAAIjC,IAGf,OAAOS,EAlkBS+W,CAAMjV,KAAK6C,KAAKC,OAG9BA,EAAMoS,IAAMvS,EAAOhB,SAGnBmB,EAAM4O,UAAY1T,EAGdgC,KAAKoF,KAAItC,EAAMiM,IAAM/O,KAAKoF,IAE9B,IAAMvC,EAAO,EACX,GACA7C,KAAK6C,KAAK+R,iBAAiB5W,GAC3BgC,KAAK6C,KACL,CACEC,QACAE,OAAQhD,KACRsN,SAAUtN,KAAKsN,SACfC,OAAQvN,KAAKuN,OACbF,KAAMrN,KAAKqN,OAOf,OAAO,IAAI6G,EAAWlW,GAAM6E,K,6BAS5B,IAAI6O,EACJ,GACE1R,KAAK6C,KAAK2R,iBACV1I,EAAOqJ,wBACmC,IAA1CnV,KAAKkU,WAAWvM,QAAQ,aAExB+J,EAAY,gBACP,IAAI,IAAM1R,KAAKkU,WAAWxT,OAAQ,CAEvC,IAAMxD,EAAO8C,KAIb,YAHA0L,YAAW,WACTxO,EAAK4D,KAAK,QAAS,6BAClB,GAGH4Q,EAAY1R,KAAKkU,WAAW,GAE9BlU,KAAK+C,WAAa,UAGlB,IACE2O,EAAY1R,KAAKoV,gBAAgB1D,GACjC,MAAO/K,GAKP,OAFA3G,K
AAKkU,WAAW9H,aAChBpM,KAAKkK,OAIPwH,EAAUxH,OACVlK,KAAKqV,aAAa3D,K,mCAQPA,GAGX,IAAMxU,EAAO8C,KAETA,KAAK0R,WAGP1R,KAAK0R,UAAUlR,qBAIjBR,KAAK0R,UAAYA,EAGjBA,EACG9R,GAAG,SAAS,WACX1C,EAAKoY,aAEN1V,GAAG,UAAU,SAASsC,GACrBhF,EAAK0G,SAAS1B,MAEftC,GAAG,SAAS,SAAS+G,GACpBzJ,EAAKqY,QAAQ5O,MAEd/G,GAAG,SAAS,WACX1C,EAAKsG,QAAQ,wB,4BAUbxF,GAGJ,IAAI0T,EAAY1R,KAAKoV,gBAAgBpX,EAAM,CAAEwX,MAAO,IAChDC,GAAS,EACPvY,EAAO8C,KAIb,SAAS0V,IACP,GAAIxY,EAAKyY,mBAAoB,CAC3B,IAAMC,GACH5V,KAAK8O,gBAAkB5R,EAAKwU,UAAU5C,eACzC2G,EAASA,GAAUG,EAEjBH,IAIJ/D,EAAUmE,KAAK,CAAC,CAAEnT,KAAM,OAAQiB,KAAM,WACtC+N,EAAUvR,KAAK,UAAU,SAAS8C,GAChC,IAAIwS,EACJ,GAAI,SAAWxS,EAAIP,MAAQ,UAAYO,EAAIU,KAAM,CAK/C,GAFAzG,EAAK4Y,WAAY,EACjB5Y,EAAK4D,KAAK,YAAa4Q,IAClBA,EAAW,OAChB5F,EAAOqJ,sBAAwB,cAAgBzD,EAAU1T,KAIzDd,EAAKwU,UAAUrD,OAAM,WACfoH,GACA,WAAavY,EAAK6F,aAItBuI,IAEApO,EAAKmY,aAAa3D,GAClBA,EAAUmE,KAAK,CAAC,CAAEnT,KAAM,aACxBxF,EAAK4D,KAAK,UAAW4Q,GACrBA,EAAY,KACZxU,EAAK4Y,WAAY,EACjB5Y,EAAK6Y,gBAEF,CAGL,IAAM5S,EAAM,IAAIC,MAAM,eACtBD,EAAIuO,UAAYA,EAAU1T,KAC1Bd,EAAK4D,KAAK,eAAgBqC,QAKhC,SAAS6S,IACHP,IAGJA,GAAS,EAETnK,IAEAoG,EAAU/F,QACV+F,EAAY,MAId,SAASuE,EAAQ9S,GACf,IAAM+S,EAAQ,IAAI9S,MAAM,gBAAkBD,GAC1C+S,EAAMxE,UAAYA,EAAU1T,KAE5BgY,IAKA9Y,EAAK4D,KAAK,eAAgBoV,GAG5B,SAASC,IACPF,EAAQ,oBAIV,SAAS1D,IACP0D,EAAQ,iBAIV,SAASG,EAAUC,GACb3E,GAAa2E,EAAGrY,OAAS0T,EAAU1T,MAGrCgY,IAKJ,SAAS1K,IACPoG,EAAUnR,eAAe,OAAQmV,GACjChE,EAAUnR,eAAe,QAAS0V,GAClCvE,EAAUnR,eAAe,QAAS4V,GAClCjZ,EAAKqD,eAAe,QAASgS,GAC7BrV,EAAKqD,eAAe,YAAa6V,GAnGnCtK,EAAOqJ,uBAAwB,EAsG/BzD,EAAUvR,KAAK,OAAQuV,GACvBhE,EAAUvR,KAAK,QAAS8V,GACxBvE,EAAUvR,KAAK,QAASgW,GAExBnW,KAAKG,KAAK,QAASoS,GACnBvS,KAAKG,KAAK,YAAaiW,GAEvB1E,EAAUxH,S,+BAkBV,GAPAlK,KAAK+C,WAAa,OAClB+I,EAAOqJ,sBAAwB,cAAgBnV,KAAK0R,UAAU1T,KAC9DgC,KAAKc,KAAK,QACVd,KAAK+V,QAKH,SAAW/V,KAAK+C,YAChB/C,KAAK6C,KAAK0R,SACVvU,KAAK0R,UAAUrD,MAMf,IAFA,IAAI5Q,EAAI,EACFC,EAAIsC,KAAK6U,SAASnU,OACjBjD,EAAIC,EAAGD,IACZuC,KAAKwV,MAAMxV,KAAK6U,SAASpX,M,+BAUtByE,GACP,GACE,YAAclC,KAAK+C,YACnB,SAAW/C,KAAK+C,YAChB,YAAc/C,KAAK+C,WAUnB,OALA/C,KAAKc,KAAK,SAAUoB,GAGpBlC,KAAKc,KAAK,aAEFoB,EAAOQ,MACb,IAAK,OACH1C,KAAKsW,YAAYjR,KAAKqB,MAAMxE,EAAOyB,OACnC,MAEF,IAAK,OACH3D,KAAKuW,mBACLvW,KAAKwW,WAAW,QAChBxW,KAAKc,KAAK,QACV,MAEF,IAAK,QACH,IAAMqC,EAAM,IAAIC,MAAM,gBACtBD,EAAIsT,KAAOvU,EAAOyB,KAClB3D,KAAKuV,QAAQpS,GACb,MAEF,IAAK,UACHnD,KAAKc,KAAK,OAAQoB,EAAOyB,MACzB3D,KAAKc,KAAK,UAAWoB,EAAOyB,S,kCAexBA,GACV3D,KAAKc,KAAK,YAAa6C,GACvB3D,KAAKoF,GAAKzB,EAAKoL,IACf/O,KAAK0R,UAAU5O,MAAMiM,IAAMpL,EAAKoL,IAChC/O,KAAK6U,SAAW7U,KAAK0W,eAAe/S,EAAKkR,UACzC7U,KAAK8U,aAAenR,EAAKmR,aACzB9U,KAAK+U,YAAcpR,EAAKoR,YACxB/U,KAAKyO,SAED,WAAazO,KAAK+C,YACtB/C,KAAKuW,qB,yCAQY,WACjB3K,aAAa5L,KAAKgV,kBAClBhV,KAAKgV,iBAAmBtJ,YAAW,WACjC,EAAKlI,QAAQ,kBACZxD,KAAK8U,aAAe9U,KAAK+U,e,gCAS5B/U,KAAKmU,YAAYtT,OAAO,EAAGb,KAAKoU,eAKhCpU,KAAKoU,cAAgB,EAEjB,IAAMpU,KAAKmU,YAAYzT,OACzBV,KAAKc,KAAK,SAEVd,KAAK+V,U,8BAWL,WAAa/V,KAAK+C,YAClB/C,KAAK0R,UAAUhO,WACd1D,KAAK8V,WACN9V,KAAKmU,YAAYzT,SAIjBV,KAAK0R,UAAUmE,KAAK7V,KAAKmU,aAGzBnU,KAAKoU,cAAgBpU,KAAKmU,YAAYzT,OACtCV,KAAKc,KAAK,Y,4BAaRmC,EAAKiJ,EAASnM,GAElB,OADAC,KAAKwW,WAAW,UAAWvT,EAAKiJ,EAASnM,GAClCC,O,2BAGJiD,EAAKiJ,EAASnM,GAEjB,OADAC,KAAKwW,WAAW,UAAWvT,EAAKiJ,EAASnM,GAClCC,O,iCAYE0C,EAAMiB,EAAMuI,EAASnM,GAW9B,GAVI,mBAAsB4D,IACxB5D,EAAK4D,EACLA,OAAOyC,GAGL,mBAAsB8F,IACxBnM,EAAKmM,EACLA,EAAU,MAGR,YAAclM,KAAK+C,YAAc,WAAa/C,KAAK+C,WAAvD,EAIAmJ,EAAUA,GAAW,IACbqF,UAAW,IAAUrF,EAAQqF,SAErC,IAAMrP,EAAS,CACbQ,KAAMA,EACNiB,KAAMA,EACNuI,QAASA,GAEXlM,KAAKc,KAAK,eAAgBoB,GAC1BlC,KAAKmU,YAAYjU,KAAKgC,GAClBnC,GAAIC,KAAKG,KAAK,QAASJ,GAC3BC,KAAK+V,W,8BASL,IAAM7Y,EAAO8C,KAoBb,SAAS2L,IACPzO
,EAAKsG,QAAQ,gBAGbtG,EAAKwU,UAAU/F,QAGjB,SAASgL,IACPzZ,EAAKqD,eAAe,UAAWoW,GAC/BzZ,EAAKqD,eAAe,eAAgBoW,GACpChL,IAGF,SAASiL,IAEP1Z,EAAKiD,KAAK,UAAWwW,GACrBzZ,EAAKiD,KAAK,eAAgBwW,GAG5B,MArCI,YAAc3W,KAAK+C,YAAc,SAAW/C,KAAK+C,aACnD/C,KAAK+C,WAAa,UAEd/C,KAAKmU,YAAYzT,OACnBV,KAAKG,KAAK,SAAS,WACbH,KAAK8V,UACPc,IAEAjL,OAGK3L,KAAK8V,UACdc,IAEAjL,KAuBG3L,O,8BAQDmD,GAGN2I,EAAOqJ,uBAAwB,EAC/BnV,KAAKc,KAAK,QAASqC,GACnBnD,KAAKwD,QAAQ,kBAAmBL,K,8BAQ1BmJ,EAAQpJ,GAEZ,YAAclD,KAAK+C,YACnB,SAAW/C,KAAK+C,YAChB,YAAc/C,KAAK+C,aAOnB6I,aAAa5L,KAAK6W,mBAClBjL,aAAa5L,KAAKgV,kBAGlBhV,KAAK0R,UAAUlR,mBAAmB,SAGlCR,KAAK0R,UAAU/F,QAGf3L,KAAK0R,UAAUlR,qBAGfR,KAAK+C,WAAa,SAGlB/C,KAAKoF,GAAK,KAGVpF,KAAKc,KAAK,QAASwL,EAAQpJ,GAtBdlD,KA0BRmU,YAAc,GA1BNnU,KA2BRoU,cAAgB,K,qCAWVS,GAIb,IAHA,IAAMiC,EAAmB,GACrBrZ,EAAI,EACFsZ,EAAIlC,EAASnU,OACZjD,EAAIsZ,EAAGtZ,KACPuC,KAAKkU,WAAWvM,QAAQkN,EAASpX,KACpCqZ,EAAiB5W,KAAK2U,EAASpX,IAEnC,OAAOqZ,O,8BA/oBUrX,GAmpBrBqM,EAAOqJ,uBAAwB,EAQ/BrJ,EAAOnK,SAAWgB,EAAOhB,SAYzB5E,EAAOD,QAAUgP,G,cCtqBjB,IACE/O,EAAOD,QAAoC,oBAAnB6P,gBACtB,oBAAqB,IAAIA,eAC3B,MAAOxJ,GAGPpG,EAAOD,SAAU,I,myDCbnB,IAAM6P,EAAiBrL,EAAQ,GACzB4M,EAAU5M,EAAQ,IAClB7B,EAAU6B,EAAQ,GAChByO,EAASzO,EAAQ,IAAjByO,KACFnC,EAAatM,EAAQ,GAS3B,SAAS0V,KAET,IAAMC,EAEG,MADK,IAAItK,EAAe,CAAEa,SAAS,IACvB0J,aAGftK,E,8BAOJ,WAAY/J,GAAM,MAGhB,GAHgB,UAChB,cAAMA,GAEkB,oBAAbsK,SAA0B,CACnC,IAAMC,EAAQ,WAAaD,SAASxL,SAChC0L,EAAOF,SAASE,KAGfA,IACHA,EAAOD,EAAQ,IAAM,IAGvB,EAAKJ,GACkB,oBAAbG,UACNtK,EAAKyK,WAAaH,SAASG,UAC7BD,IAASxK,EAAKwK,KAChB,EAAKJ,GAAKpK,EAAK0K,SAAWH,EAK5B,IAAM+J,EAActU,GAAQA,EAAKsU,YArBjB,OAsBhB,EAAKrI,eAAiBmI,IAAYE,EAtBlB,E,4CA+BC,IAAXtU,EAAW,uDAAJ,GAEb,OADA,EAAcA,EAAM,CAAEmK,GAAIhN,KAAKgN,GAAIC,GAAIjN,KAAKiN,IAAMjN,KAAK6C,MAChD,IAAIuU,EAAQpX,KAAK8H,MAAOjF,K,8BAUzBc,EAAM5D,GACZ,IAAMsX,EAAMrX,KAAKsX,QAAQ,CACvBC,OAAQ,OACR5T,KAAMA,IAEFzG,EAAO8C,KACbqX,EAAIzX,GAAG,UAAWG,GAClBsX,EAAIzX,GAAG,SAAS,SAASuD,GACvBjG,EAAKqY,QAAQ,iBAAkBpS,Q,+BAYjC,IAAMkU,EAAMrX,KAAKsX,UACXpa,EAAO8C,KACbqX,EAAIzX,GAAG,QAAQ,SAAS+D,GACtBzG,EAAKsa,OAAO7T,MAEd0T,EAAIzX,GAAG,SAAS,SAASuD,GACvBjG,EAAKqY,QAAQ,iBAAkBpS,MAEjCnD,KAAKyX,QAAUJ,M,GA9EDnJ,GAkFZkJ,E,8BAOJ,WAAYtP,EAAKjF,GAAM,wBACrB,gBACKA,KAAOA,EAEZ,EAAK0U,OAAS1U,EAAK0U,QAAU,MAC7B,EAAKzP,IAAMA,EACX,EAAK4P,OAAQ,IAAU7U,EAAK6U,MAC5B,EAAK/T,UAAOyC,IAAcvD,EAAKc,KAAOd,EAAKc,KAAO,KAElD,EAAK5E,SATgB,E,2CAkBrB,IAAM8D,EAAOkN,EACX/P,KAAK6C,KACL,QACA,aACA,MACA,MACA,aACA,OACA,KACA,UACA,sBAEFA,EAAK2K,UAAYxN,KAAK6C,KAAKmK,GAC3BnK,EAAK4K,UAAYzN,KAAK6C,KAAKoK,GAE3B,IAAM0K,EAAO3X,KAAK2X,IAAM,IAAIhL,EAAe9J,GACrC3F,EAAO8C,KAEb,IAGE2X,EAAIzN,KAAKlK,KAAKuX,OAAQvX,KAAK8H,IAAK9H,KAAK0X,OACrC,IACE,GAAI1X,KAAK6C,KAAK+U,aAEZ,IAAK,IAAIna,KADTka,EAAIE,uBAAyBF,EAAIE,uBAAsB,GACzC7X,KAAK6C,KAAK+U,aAClB5X,KAAK6C,KAAK+U,aAAatY,eAAe7B,IACxCka,EAAIG,iBAAiBra,EAAGuC,KAAK6C,KAAK+U,aAAana,IAIrD,MAAOkJ,IAET,GAAI,SAAW3G,KAAKuX,OAClB,IACEI,EAAIG,iBAAiB,eAAgB,4BACrC,MAAOnR,IAGX,IACEgR,EAAIG,iBAAiB,SAAU,OAC/B,MAAOnR,IAGL,oBAAqBgR,IACvBA,EAAIrD,gBAAkBtU,KAAK6C,KAAKyR,iBAG9BtU,KAAK6C,KAAKkV,iBACZJ,EAAIhO,QAAU3J,KAAK6C,KAAKkV,gBAGtB/X,KAAKgY,UACPL,EAAIM,OAAS,WACX/a,EAAKgb,UAEPP,EAAI1B,QAAU,WACZ/Y,EAAKqY,QAAQoC,EAAIQ,gBAGnBR,EAAIS,mBAAqB,WACnB,IAAMT,EAAI5U,aACV,MAAQ4U,EAAIU,QAAU,OAASV,EAAIU,OACrCnb,EAAKgb,SAILxM,YAAW,WACTxO,EAAKqY,QAA8B,iBAAfoC,EAAIU,OAAsBV,EAAIU,OAAS,KAC1D,KAOTV,EAAI9B,KAAK7V,KAAK2D,MACd,MAAOgD,GAOP,YAHA+E,YAAW,WACTxO,EAAKqY,QAAQ5O,KACZ,GAImB,oBAAb2R,WACTtY,KAAKwO,MAAQ4I,EAAQmB,gBACrBnB,EAAQoB,SAASxY,KAAKwO,OAASxO,Q,kCAUjCA,KAAKc,KAAK,WACVd,KAAKsL,Y,6BAQA3H,GACL3D,KAAKc,KAAK,OAAQ6C,GAClB3D,KAAKyY,c,8BAQCtV,GACNnD,KAAKc,KAAK,QAASqC,GACnBnD,KAAKsL,SAAQ,K,8BAQPoN
,GACN,QAAI,IAAuB1Y,KAAK2X,KAAO,OAAS3X,KAAK2X,IAArD,CAUA,GANI3X,KAAKgY,SACPhY,KAAK2X,IAAIM,OAASjY,KAAK2X,IAAI1B,QAAUe,EAErChX,KAAK2X,IAAIS,mBAAqBpB,EAG5B0B,EACF,IACE1Y,KAAK2X,IAAIgB,QACT,MAAOhS,IAGa,oBAAb2R,iBACFlB,EAAQoB,SAASxY,KAAKwO,OAG/BxO,KAAK2X,IAAM,Q,+BASX,IAAMhU,EAAO3D,KAAK2X,IAAIQ,aACT,OAATxU,GACF3D,KAAKwX,OAAO7T,K,+BAUd,MAAiC,oBAAnBmK,iBAAmC9N,KAAKiN,IAAMjN,KAAK6N,a,8BASjE7N,KAAKsL,c,GA5Ma7L,GAyNtB,GAHA2X,EAAQmB,cAAgB,EACxBnB,EAAQoB,SAAW,GAEK,oBAAbF,SACT,GAA2B,mBAAhBM,YACTA,YAAY,WAAYC,QACnB,GAAgC,mBAArBhZ,iBAAiC,CAEjDA,iBADyB,eAAgB+N,EAAa,WAAa,SAChCiL,GAAe,GAItD,SAASA,IACP,IAAK,IAAIpb,KAAK2Z,EAAQoB,SAChBpB,EAAQoB,SAASlZ,eAAe7B,IAClC2Z,EAAQoB,SAAS/a,GAAGkb,QAK1B5b,EAAOD,QAAU8P,EACjB7P,EAAOD,QAAQsa,QAAUA,G,oBCnVjBnI,EAAiB3N,EAAQ,IAAzB2N,aAEF2D,EACY,mBAATC,MACU,oBAATA,MACmC,6BAAzC1U,OAAOkB,UAAUsT,SAAS/U,KAAKiV,MAC7BJ,EAA+C,mBAAhBC,YA8B/BoG,EAAqB,SAACnV,EAAM7B,GAChC,IAAMiX,EAAa,IAAIC,WAKvB,OAJAD,EAAWd,OAAS,WAClB,IAAMgB,EAAUF,EAAWG,OAAO1W,MAAM,KAAK,GAC7CV,EAAS,IAAMmX,IAEVF,EAAWI,cAAcxV,IAGlC5G,EAAOD,QA9Bc,SAAC,EAAgBgS,EAAgBhN,GAAa,IANpDpC,EAMSgD,EAA2C,EAA3CA,KAAMiB,EAAqC,EAArCA,KAC5B,OAAIiP,GAAkBjP,aAAgBkP,KAChC/D,EACKhN,EAAS6B,GAETmV,EAAmBnV,EAAM7B,GAGlC2Q,IACC9O,aAAgB+O,cAfNhT,EAe4BiE,EAdJ,mBAAvB+O,YAAYM,OACtBN,YAAYM,OAAOtT,GACnBA,GAAOA,EAAIuT,kBAAkBP,cAc3B5D,EACKhN,EAAS6B,aAAgB+O,YAAc/O,EAAOA,EAAKsP,QAEnD6F,EAAmB,IAAIjG,KAAK,CAAClP,IAAQ7B,GAIzCA,EAASmN,EAAavM,IAASiB,GAAQ,O,oBC7B5CyV,E,EAJ2C9X,EAAQ,IAA/C4N,E,EAAAA,qBAAsBC,E,EAAAA,aAEuB,mBAAhBuD,cAInC0G,EAAgB9X,EAAQ,KAG1B,IA4BM+X,EAAqB,SAAC1V,EAAMpB,GAChC,GAAI6W,EAAe,CACjB,IAAMtJ,EAAUsJ,EAAcpV,OAAOL,GACrC,OAAO2V,EAAUxJ,EAASvN,GAE1B,MAAO,CAAEyD,QAAQ,EAAMrC,SAIrB2V,EAAY,SAAC3V,EAAMpB,GACvB,OAAQA,GACN,IAAK,OACH,OAAOoB,aAAgB+O,YAAc,IAAIG,KAAK,CAAClP,IAASA,EAC1D,IAAK,cACL,QACE,OAAOA,IAIb5G,EAAOD,QA/Cc,SAACqF,EAAeI,GACnC,GAA6B,iBAAlBJ,EACT,MAAO,CACLO,KAAM,UACNiB,KAAM2V,EAAUnX,EAAeI,IAGnC,IAAMG,EAAOP,EAAcgE,OAAO,GAClC,MAAa,MAATzD,EACK,CACLA,KAAM,UACNiB,KAAM0V,EAAmBlX,EAAcoE,UAAU,GAAIhE,IAGtC2M,EAAqBxM,GAIjCP,EAAczB,OAAS,EAC1B,CACEgC,KAAMwM,EAAqBxM,GAC3BiB,KAAMxB,EAAcoE,UAAU,IAEhC,CACE7D,KAAMwM,EAAqBxM,IARxByM,I,eClBX,WACE,aAMA,IAJA,IAAIoK,EAAQ,mEAGRlG,EAAS,IAAImG,WAAW,KACnB/b,EAAI,EAAGA,EAAI8b,EAAM7Y,OAAQjD,IAChC4V,EAAOkG,EAAME,WAAWhc,IAAMA,EAGhCX,EAAQ+G,OAAS,SAAS6V,GACxB,IACAjc,EADIkc,EAAQ,IAAIH,WAAWE,GACxBzY,EAAM0Y,EAAMjZ,OAAQsF,EAAS,GAEhC,IAAKvI,EAAI,EAAGA,EAAIwD,EAAKxD,GAAG,EACtBuI,GAAUuT,EAAMI,EAAMlc,IAAM,GAC5BuI,GAAUuT,GAAmB,EAAXI,EAAMlc,KAAW,EAAMkc,EAAMlc,EAAI,IAAM,GACzDuI,GAAUuT,GAAuB,GAAfI,EAAMlc,EAAI,KAAY,EAAMkc,EAAMlc,EAAI,IAAM,GAC9DuI,GAAUuT,EAAqB,GAAfI,EAAMlc,EAAI,IAS5B,OANKwD,EAAM,GAAO,EAChB+E,EAASA,EAAOO,UAAU,EAAGP,EAAOtF,OAAS,GAAK,IACzCO,EAAM,GAAM,IACrB+E,EAASA,EAAOO,UAAU,EAAGP,EAAOtF,OAAS,GAAK,MAG7CsF,GAGTlJ,EAAQkH,OAAU,SAASgC,GACzB,IACqBvI,EACrBmc,EAAUC,EAAUC,EAAUC,EAF1BC,EAA+B,IAAhBhU,EAAOtF,OAC1BO,EAAM+E,EAAOtF,OAAWnB,EAAI,EAGM,MAA9ByG,EAAOA,EAAOtF,OAAS,KACzBsZ,IACkC,MAA9BhU,EAAOA,EAAOtF,OAAS,IACzBsZ,KAIJ,IAAIN,EAAc,IAAIhH,YAAYsH,GAClCL,EAAQ,IAAIH,WAAWE,GAEvB,IAAKjc,EAAI,EAAGA,EAAIwD,EAAKxD,GAAG,EACtBmc,EAAWvG,EAAOrN,EAAOyT,WAAWhc,IACpCoc,EAAWxG,EAAOrN,EAAOyT,WAAWhc,EAAE,IACtCqc,EAAWzG,EAAOrN,EAAOyT,WAAWhc,EAAE,IACtCsc,EAAW1G,EAAOrN,EAAOyT,WAAWhc,EAAE,IAEtCkc,EAAMpa,KAAQqa,GAAY,EAAMC,GAAY,EAC5CF,EAAMpa,MAAoB,GAAXsa,IAAkB,EAAMC,GAAY,EACnDH,EAAMpa,MAAoB,EAAXua,IAAiB,EAAiB,GAAXC,EAGxC,OAAOL,GAzDX,I,mgDCPA,IAUI9Y,EAVEsN,EAAU5M,EAAQ,IAClBsM,EAAatM,EAAQ,GAErB2Y,EAAW,MACXC,EAAkB,OAYxB,SAASlD,K,IAEHmD,E,sQAOJ,WAAYtX,GAAM,O,4FAAA,UAChB,cAAMA,IAEDC,MAAQ,EAAKA,OAAS,GAItBlC,IAEHA,EAAYgN,EAAWwM,OAASxM,EAAWwM,QAAU,IAIvD,EAAK5L,MAAQ5N,EA
AUF,OAGvB,IAAMxD,EAAO,EAAH,GAhBM,OAiBhB0D,EAAUV,MAAK,SAAS+C,GACtB/F,EAAKsa,OAAOvU,MAId,EAAKH,MAAMiU,EAAI,EAAKvI,MAGY,mBAArB3O,kBACTA,iBACE,gBACA,WACM3C,EAAKmd,SAAQnd,EAAKmd,OAAOpE,QAAUe,MAEzC,GA/BY,E,+CAiDZhX,KAAKqa,SACPra,KAAKqa,OAAOC,WAAWC,YAAYva,KAAKqa,QACxCra,KAAKqa,OAAS,MAGZra,KAAKwa,OACPxa,KAAKwa,KAAKF,WAAWC,YAAYva,KAAKwa,MACtCxa,KAAKwa,KAAO,KACZxa,KAAKya,OAAS,MAGhB,8C,+BASA,IAAMvd,EAAO8C,KACPqa,EAAS/B,SAASoC,cAAc,UAElC1a,KAAKqa,SACPra,KAAKqa,OAAOC,WAAWC,YAAYva,KAAKqa,QACxCra,KAAKqa,OAAS,MAGhBA,EAAO3C,OAAQ,EACf2C,EAAO5S,IAAMzH,KAAK8H,MAClBuS,EAAOpE,QAAU,SAAStP,GACxBzJ,EAAKqY,QAAQ,mBAAoB5O,IAGnC,IAAMgU,EAAWrC,SAASsC,qBAAqB,UAAU,GACrDD,EACFA,EAASL,WAAWO,aAAaR,EAAQM,IAExCrC,SAASwC,MAAQxC,SAASyC,MAAMC,YAAYX,GAE/Cra,KAAKqa,OAASA,EAGZ,oBAAuBY,WAAa,SAASjH,KAAKiH,UAAUC,YAG5DxP,YAAW,WACT,IAAM+O,EAASnC,SAASoC,cAAc,UACtCpC,SAASyC,KAAKC,YAAYP,GAC1BnC,SAASyC,KAAKR,YAAYE,KACzB,O,8BAWC9W,EAAM5D,GACZ,IACI0a,EADEvd,EAAO8C,KAGb,IAAKA,KAAKwa,KAAM,CACd,IAAMA,EAAOlC,SAASoC,cAAc,QAC9BS,EAAO7C,SAASoC,cAAc,YAC9BtV,EAAMpF,KAAKob,SAAW,cAAgBpb,KAAKwO,MAEjDgM,EAAKa,UAAY,WACjBb,EAAKc,MAAMC,SAAW,WACtBf,EAAKc,MAAME,IAAM,UACjBhB,EAAKc,MAAMG,KAAO,UAClBjB,EAAKkB,OAAStW,EACdoV,EAAKjD,OAAS,OACdiD,EAAKmB,aAAa,iBAAkB,SACpCR,EAAKnd,KAAO,IACZwc,EAAKQ,YAAYG,GACjB7C,SAASyC,KAAKC,YAAYR,GAE1Bxa,KAAKwa,KAAOA,EACZxa,KAAKmb,KAAOA,EAKd,SAASS,IACPC,IACA9b,IAGF,SAAS8b,IACP,GAAI3e,EAAKud,OACP,IACEvd,EAAKsd,KAAKD,YAAYrd,EAAKud,QAC3B,MAAO9T,GACPzJ,EAAKqY,QAAQ,qCAAsC5O,GAIvD,IAEE,IAAMmV,EAAO,oCAAsC5e,EAAKke,SAAW,KACnEX,EAASnC,SAASoC,cAAcoB,GAChC,MAAOnV,IACP8T,EAASnC,SAASoC,cAAc,WACzB1c,KAAOd,EAAKke,SACnBX,EAAOhT,IAAM,eAGfgT,EAAOrV,GAAKlI,EAAKke,SAEjBle,EAAKsd,KAAKQ,YAAYP,GACtBvd,EAAKud,OAASA,EA7BhBza,KAAKwa,KAAKuB,OAAS/b,KAAK8H,MAgCxB+T,IAIAlY,EAAOA,EAAKiE,QAAQsS,EAAiB,QACrCla,KAAKmb,KAAKzc,MAAQiF,EAAKiE,QAAQqS,EAAU,OAEzC,IACEja,KAAKwa,KAAKwB,SACV,MAAOrV,IAEL3G,KAAKya,OAAO7B,YACd5Y,KAAKya,OAAOrC,mBAAqB,WACA,aAA3Blb,EAAKud,OAAO1X,YACd6Y,KAIJ5b,KAAKya,OAAOxC,OAAS2D,I,qCAhJvB,OAAO,O,8BA/CgB1N,GAoM3BnR,EAAOD,QAAUqd,G,ytCCtNjB,IAAMvX,EAAYtB,EAAQ,GACpBqB,EAASrB,EAAQ,GACjB0M,EAAU1M,EAAQ,GAClB2M,EAAQ3M,EAAQ,IACdyO,EAASzO,EAAQ,IAAjByO,K,EAKJzO,EAAQ,IAHV2a,E,EAAAA,UACAC,E,EAAAA,sBACAC,E,EAAAA,kBAOIC,EACiB,oBAAdnB,WACsB,iBAAtBA,UAAUoB,SACmB,gBAApCpB,UAAUoB,QAAQC,cAEdC,E,sQAOJ,WAAY1Z,GAAM,a,4FAAA,UAChB,cAAMA,IAEDiM,gBAAkBjM,EAAKsU,YAHZ,E,8CAqBhB,GAAKnX,KAAKwc,QAAV,CAKA,IAAM1U,EAAM9H,KAAK8H,MACX2U,EAAYzc,KAAK6C,KAAK4Z,UAGtB5Z,EAAOuZ,EACT,GACArM,EACE/P,KAAK6C,KACL,QACA,oBACA,MACA,MACA,aACA,OACA,KACA,UACA,qBACA,gBAGF7C,KAAK6C,KAAK+U,eACZ/U,EAAK6Z,QAAU1c,KAAK6C,KAAK+U,cAG3B,IACE5X,KAAK2c,GACHT,IAA0BE,EACtBK,EACE,IAAIR,EAAUnU,EAAK2U,GACnB,IAAIR,EAAUnU,GAChB,IAAImU,EAAUnU,EAAK2U,EAAW5Z,GACpC,MAAOM,GACP,OAAOnD,KAAKc,KAAK,QAASqC,GAG5BnD,KAAK2c,GAAGpa,WAAavC,KAAKgD,OAAOT,YAAc4Z,EAE/Cnc,KAAK4c,uB,0CASL,IAAM1f,EAAO8C,KAEbA,KAAK2c,GAAGvR,OAAS,WACflO,EAAKuR,UAEPzO,KAAK2c,GAAGpK,QAAU,WAChBrV,EAAKsG,WAEPxD,KAAK2c,GAAGE,UAAY,SAASvL,GAC3BpU,EAAKsa,OAAOlG,EAAG3N,OAEjB3D,KAAK2c,GAAG1G,QAAU,SAAStP,GACzBzJ,EAAKqY,QAAQ,kBAAmB5O,M,4BAU9B9E,GACJ,IAAM3E,EAAO8C,KACbA,KAAK0D,UAAW,EAOhB,IAHA,IAAI4K,EAAQzM,EAAQnB,OAChBjD,EAAI,EACFC,EAAI4Q,EACH7Q,EAAIC,EAAGD,KACZ,SAAUyE,GACRS,EAAOtB,aAAaa,EAAQhF,EAAK4R,gBAAgB,SAASnL,GAExD,IAAMd,EAAO,GACRqZ,IACCha,EAAOgK,UACTrJ,EAAK0O,SAAWrP,EAAOgK,QAAQqF,UAG7BrU,EAAK2F,KAAK6R,oBAEV,iBAAoB/Q,EAChBmZ,OAAOC,WAAWpZ,GAClBA,EAAKjD,QACDxD,EAAK2F,KAAK6R,kBAAkBC,YACpC9R,EAAK0O,UAAW,IAQtB,IACM2K,EAEFhf,EAAKyf,GAAG9G,KAAKlS,GAEbzG,EAAKyf,GAAG9G,KAAKlS,EAAMd,GAErB,MAAO8D,MAKP2H,IAMNpR,EAAK4D,KAAK,SAIV4K,YAAW,WACTxO,EAAKwG,UAAW,EAChBxG,EAAK4D,KAAK,WACT,OAhDH
,CAqCGe,EAAQpE,M,gCAqBbmF,EAAUvD,UAAUmE,QAAQ5F,KAAKoC,Q,qCASV,IAAZA,KAAK2c,IACd3c,KAAK2c,GAAGhR,U,4BAUV,IAAI7I,EAAQ9C,KAAK8C,OAAS,GACpB6L,EAAS3O,KAAK6C,KAAK0K,OAAS,MAAQ,KACtCF,EAAO,GA6BX,OAzBErN,KAAK6C,KAAKwK,OACR,QAAUsB,GAAqC,MAA3BzI,OAAOlG,KAAK6C,KAAKwK,OACpC,OAASsB,GAAqC,KAA3BzI,OAAOlG,KAAK6C,KAAKwK,SAEvCA,EAAO,IAAMrN,KAAK6C,KAAKwK,MAIrBrN,KAAK6C,KAAK+L,oBACZ9L,EAAM9C,KAAK6C,KAAKgM,gBAAkBZ,KAI/BjO,KAAK8O,iBACRhM,EAAMkM,IAAM,IAGdlM,EAAQkL,EAAQnK,OAAOf,IAGbpC,SACRoC,EAAQ,IAAMA,GAKd6L,EACA,QAHgD,IAArC3O,KAAK6C,KAAKyK,SAAS3F,QAAQ,KAI9B,IAAM3H,KAAK6C,KAAKyK,SAAW,IAAMtN,KAAK6C,KAAKyK,UACnDD,EACArN,KAAK6C,KAAKuF,KACVtF,I,8BAWF,SACImZ,GACA,iBAAkBA,GAAajc,KAAKhC,OAASue,EAAGld,UAAUrB,Q,2BA5N9D,MAAO,iB,8BAnBM4E,GAoPjB7F,EAAOD,QAAUyf,G,gBCxQjB,IAAM3O,EAAatM,EAAQ,GAE3BvE,EAAOD,QAAU,CACfmf,UAAWrO,EAAWqO,WAAarO,EAAWoP,aAC9Cd,uBAAuB,EACvBC,kBAAmB,gB,kQCJrBhe,OAAOC,eAAetB,EAAS,aAAc,CAAE4B,OAAO,IACtD5B,EAAQwK,kBAAoBxK,EAAQ0I,uBAAoB,EACxD,IAAMd,EAAcpD,EAAQ,IAgB5BxE,EAAQ0I,kBARR,SAA2BtD,GACvB,IAAMwD,EAAU,GACVuX,EAAa/a,EAAOyB,KACpB8B,EAAOvD,EAGb,OAFAuD,EAAK9B,KAKT,SAASuZ,EAAmBvZ,EAAM+B,GAC9B,IAAK/B,EACD,OAAOA,EACX,GAAIe,EAAYqB,SAASpC,GAAO,CAC5B,IAAMwZ,EAAc,CAAEC,cAAc,EAAM5N,IAAK9J,EAAQhF,QAEvD,OADAgF,EAAQxF,KAAKyD,GACNwZ,EAEN,GAAInc,MAAMmG,QAAQxD,GAAO,CAE1B,IADA,IAAM0Z,EAAU,IAAIrc,MAAM2C,EAAKjD,QACtBjD,EAAI,EAAGA,EAAIkG,EAAKjD,OAAQjD,IAC7B4f,EAAQ5f,GAAKyf,EAAmBvZ,EAAKlG,GAAIiI,GAE7C,OAAO2X,EAEN,GAAoB,WAAhB,EAAO1Z,MAAuBA,aAAgBkM,MAAO,CAC1D,IAAMwN,EAAU,GAChB,IAAK,IAAMre,KAAO2E,EACVA,EAAKrE,eAAeN,KACpBqe,EAAQre,GAAOke,EAAmBvZ,EAAK3E,GAAM0G,IAGrD,OAAO2X,EAEX,OAAO1Z,EA7BKuZ,CAAmBD,EAAYvX,GAC3CD,EAAKP,YAAcQ,EAAQhF,OACpB,CAAEwB,OAAQuD,EAAMC,QAASA,IA0CpC5I,EAAQwK,kBALR,SAA2BpF,EAAQwD,GAG/B,OAFAxD,EAAOyB,KAKX,SAAS2Z,EAAmB3Z,EAAM+B,GAC9B,IAAK/B,EACD,OAAOA,EACX,GAAIA,GAAQA,EAAKyZ,aACb,OAAO1X,EAAQ/B,EAAK6L,KAEnB,GAAIxO,MAAMmG,QAAQxD,GACnB,IAAK,IAAIlG,EAAI,EAAGA,EAAIkG,EAAKjD,OAAQjD,IAC7BkG,EAAKlG,GAAK6f,EAAmB3Z,EAAKlG,GAAIiI,QAGzC,GAAoB,WAAhB,EAAO/B,GACZ,IAAK,IAAM3E,KAAO2E,EACVA,EAAKrE,eAAeN,KACpB2E,EAAK3E,GAAOse,EAAmB3Z,EAAK3E,GAAM0G,IAItD,OAAO/B,EAvBO2Z,CAAmBpb,EAAOyB,KAAM+B,GAC9CxD,EAAOgD,iBAAckB,EACdlE,I,cCtCX,SAAS4G,EAAQjG,GACfA,EAAOA,GAAQ,GACf7C,KAAKud,GAAK1a,EAAK2G,KAAO,IACtBxJ,KAAKyJ,IAAM5G,EAAK4G,KAAO,IACvBzJ,KAAKwd,OAAS3a,EAAK2a,QAAU,EAC7Bxd,KAAK0J,OAAS7G,EAAK6G,OAAS,GAAK7G,EAAK6G,QAAU,EAAI7G,EAAK6G,OAAS,EAClE1J,KAAK+K,SAAW,EApBlBhO,EAAOD,QAAUgM,EA8BjBA,EAAQzJ,UAAUmN,SAAW,WAC3B,IAAI+Q,EAAKvd,KAAKud,GAAK7N,KAAK+N,IAAIzd,KAAKwd,OAAQxd,KAAK+K,YAC9C,GAAI/K,KAAK0J,OAAQ,CACf,IAAIgU,EAAQhO,KAAKiO,SACbC,EAAYlO,KAAKC,MAAM+N,EAAO1d,KAAK0J,OAAS6T,GAChDA,EAAoC,IAAN,EAAxB7N,KAAKC,MAAa,GAAP+N,IAAwBH,EAAKK,EAAYL,EAAKK,EAEjE,OAAgC,EAAzBlO,KAAKlG,IAAI+T,EAAIvd,KAAKyJ,MAS3BX,EAAQzJ,UAAUgN,MAAQ,WACxBrM,KAAK+K,SAAW,GASlBjC,EAAQzJ,UAAUmL,OAAS,SAAShB,GAClCxJ,KAAKud,GAAK/T,GASZV,EAAQzJ,UAAUuL,OAAS,SAASnB,GAClCzJ,KAAKyJ,IAAMA,GASbX,EAAQzJ,UAAUqL,UAAY,SAAShB,GACrC1J,KAAK0J,OAASA","file":"socket.io.min.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"io\"] = factory();\n\telse\n\t\troot[\"io\"] = factory();\n})((() => {\n if (typeof self !== 'undefined') {\n return self;\n } else if (typeof window !== 'undefined') {\n return window;\n } else if (typeof global !== 'undefined') {\n return global;\n } else {\n return Function('return this')();\n }\n })(), function() 
{\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 18);\n","\r\n/**\r\n * Expose `Emitter`.\r\n */\r\n\r\nif (typeof module !== 'undefined') {\r\n module.exports = Emitter;\r\n}\r\n\r\n/**\r\n * Initialize a new `Emitter`.\r\n *\r\n * @api public\r\n */\r\n\r\nfunction Emitter(obj) {\r\n if (obj) return mixin(obj);\r\n};\r\n\r\n/**\r\n * Mixin the emitter properties.\r\n *\r\n * @param {Object} obj\r\n * @return {Object}\r\n * @api private\r\n */\r\n\r\nfunction mixin(obj) {\r\n for (var key in Emitter.prototype) {\r\n obj[key] = Emitter.prototype[key];\r\n }\r\n return obj;\r\n}\r\n\r\n/**\r\n * Listen on the 
given `event` with `fn`.\r\n *\r\n * @param {String} event\r\n * @param {Function} fn\r\n * @return {Emitter}\r\n * @api public\r\n */\r\n\r\nEmitter.prototype.on =\r\nEmitter.prototype.addEventListener = function(event, fn){\r\n this._callbacks = this._callbacks || {};\r\n (this._callbacks['$' + event] = this._callbacks['$' + event] || [])\r\n .push(fn);\r\n return this;\r\n};\r\n\r\n/**\r\n * Adds an `event` listener that will be invoked a single\r\n * time then automatically removed.\r\n *\r\n * @param {String} event\r\n * @param {Function} fn\r\n * @return {Emitter}\r\n * @api public\r\n */\r\n\r\nEmitter.prototype.once = function(event, fn){\r\n function on() {\r\n this.off(event, on);\r\n fn.apply(this, arguments);\r\n }\r\n\r\n on.fn = fn;\r\n this.on(event, on);\r\n return this;\r\n};\r\n\r\n/**\r\n * Remove the given callback for `event` or all\r\n * registered callbacks.\r\n *\r\n * @param {String} event\r\n * @param {Function} fn\r\n * @return {Emitter}\r\n * @api public\r\n */\r\n\r\nEmitter.prototype.off =\r\nEmitter.prototype.removeListener =\r\nEmitter.prototype.removeAllListeners =\r\nEmitter.prototype.removeEventListener = function(event, fn){\r\n this._callbacks = this._callbacks || {};\r\n\r\n // all\r\n if (0 == arguments.length) {\r\n this._callbacks = {};\r\n return this;\r\n }\r\n\r\n // specific event\r\n var callbacks = this._callbacks['$' + event];\r\n if (!callbacks) return this;\r\n\r\n // remove all handlers\r\n if (1 == arguments.length) {\r\n delete this._callbacks['$' + event];\r\n return this;\r\n }\r\n\r\n // remove specific handler\r\n var cb;\r\n for (var i = 0; i < callbacks.length; i++) {\r\n cb = callbacks[i];\r\n if (cb === fn || cb.fn === fn) {\r\n callbacks.splice(i, 1);\r\n break;\r\n }\r\n }\r\n\r\n // Remove event specific arrays for event types that no\r\n // one is subscribed for to avoid memory leak.\r\n if (callbacks.length === 0) {\r\n delete this._callbacks['$' + event];\r\n }\r\n\r\n return this;\r\n};\r\n\r\n/**\r\n * Emit `event` with the given args.\r\n *\r\n * @param {String} event\r\n * @param {Mixed} ...\r\n * @return {Emitter}\r\n */\r\n\r\nEmitter.prototype.emit = function(event){\r\n this._callbacks = this._callbacks || {};\r\n\r\n var args = new Array(arguments.length - 1)\r\n , callbacks = this._callbacks['$' + event];\r\n\r\n for (var i = 1; i < arguments.length; i++) {\r\n args[i - 1] = arguments[i];\r\n }\r\n\r\n if (callbacks) {\r\n callbacks = callbacks.slice(0);\r\n for (var i = 0, len = callbacks.length; i < len; ++i) {\r\n callbacks[i].apply(this, args);\r\n }\r\n }\r\n\r\n return this;\r\n};\r\n\r\n/**\r\n * Return array of callbacks for `event`.\r\n *\r\n * @param {String} event\r\n * @return {Array}\r\n * @api public\r\n */\r\n\r\nEmitter.prototype.listeners = function(event){\r\n this._callbacks = this._callbacks || {};\r\n return this._callbacks['$' + event] || [];\r\n};\r\n\r\n/**\r\n * Check if this emitter has `event` handlers.\r\n *\r\n * @param {String} event\r\n * @return {Boolean}\r\n * @api public\r\n */\r\n\r\nEmitter.prototype.hasListeners = function(event){\r\n return !! 
this.listeners(event).length;\r\n};\r\n","const encodePacket = require(\"./encodePacket\");\nconst decodePacket = require(\"./decodePacket\");\n\nconst SEPARATOR = String.fromCharCode(30); // see https://en.wikipedia.org/wiki/Delimiter#ASCII_delimited_text\n\nconst encodePayload = (packets, callback) => {\n // some packets may be added to the array while encoding, so the initial length must be saved\n const length = packets.length;\n const encodedPackets = new Array(length);\n let count = 0;\n\n packets.forEach((packet, i) => {\n // force base64 encoding for binary packets\n encodePacket(packet, false, encodedPacket => {\n encodedPackets[i] = encodedPacket;\n if (++count === length) {\n callback(encodedPackets.join(SEPARATOR));\n }\n });\n });\n};\n\nconst decodePayload = (encodedPayload, binaryType) => {\n const encodedPackets = encodedPayload.split(SEPARATOR);\n const packets = [];\n for (let i = 0; i < encodedPackets.length; i++) {\n const decodedPacket = decodePacket(encodedPackets[i], binaryType);\n packets.push(decodedPacket);\n if (decodedPacket.type === \"error\") {\n break;\n }\n }\n return packets;\n};\n\nmodule.exports = {\n protocol: 4,\n encodePacket,\n encodePayload,\n decodePacket,\n decodePayload\n};\n","module.exports = (() => {\n if (typeof self !== \"undefined\") {\n return self;\n } else if (typeof window !== \"undefined\") {\n return window;\n } else {\n return Function(\"return this\")();\n }\n})();\n","const parser = require(\"engine.io-parser\");\nconst Emitter = require(\"component-emitter\");\n\nclass Transport extends Emitter {\n /**\n * Transport abstract constructor.\n *\n * @param {Object} options.\n * @api private\n */\n constructor(opts) {\n super();\n\n this.opts = opts;\n this.query = opts.query;\n this.readyState = \"\";\n this.socket = opts.socket;\n }\n\n /**\n * Emits an error.\n *\n * @param {String} str\n * @return {Transport} for chaining\n * @api public\n */\n onError(msg, desc) {\n const err = new Error(msg);\n err.type = \"TransportError\";\n err.description = desc;\n this.emit(\"error\", err);\n return this;\n }\n\n /**\n * Opens the transport.\n *\n * @api public\n */\n open() {\n if (\"closed\" === this.readyState || \"\" === this.readyState) {\n this.readyState = \"opening\";\n this.doOpen();\n }\n\n return this;\n }\n\n /**\n * Closes the transport.\n *\n * @api private\n */\n close() {\n if (\"opening\" === this.readyState || \"open\" === this.readyState) {\n this.doClose();\n this.onClose();\n }\n\n return this;\n }\n\n /**\n * Sends multiple packets.\n *\n * @param {Array} packets\n * @api private\n */\n send(packets) {\n if (\"open\" === this.readyState) {\n this.write(packets);\n } else {\n throw new Error(\"Transport not open\");\n }\n }\n\n /**\n * Called upon open\n *\n * @api private\n */\n onOpen() {\n this.readyState = \"open\";\n this.writable = true;\n this.emit(\"open\");\n }\n\n /**\n * Called with data.\n *\n * @param {String} data\n * @api private\n */\n onData(data) {\n const packet = parser.decodePacket(data, this.socket.binaryType);\n this.onPacket(packet);\n }\n\n /**\n * Called with a decoded packet.\n */\n onPacket(packet) {\n this.emit(\"packet\", packet);\n }\n\n /**\n * Called upon close.\n *\n * @api private\n */\n onClose() {\n this.readyState = \"closed\";\n this.emit(\"close\");\n }\n}\n\nmodule.exports = Transport;\n","/**\n * Compiles a querystring\n * Returns string representation of the object\n *\n * @param {Object}\n * @api private\n */\n\nexports.encode = function (obj) {\n var str = '';\n\n for (var i in 
obj) {\n if (obj.hasOwnProperty(i)) {\n if (str.length) str += '&';\n str += encodeURIComponent(i) + '=' + encodeURIComponent(obj[i]);\n }\n }\n\n return str;\n};\n\n/**\n * Parses a simple querystring into an object\n *\n * @param {String} qs\n * @api private\n */\n\nexports.decode = function(qs){\n var qry = {};\n var pairs = qs.split('&');\n for (var i = 0, l = pairs.length; i < l; i++) {\n var pair = pairs[i].split('=');\n qry[decodeURIComponent(pair[0])] = decodeURIComponent(pair[1]);\n }\n return qry;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Decoder = exports.Encoder = exports.PacketType = exports.protocol = void 0;\nconst Emitter = require(\"component-emitter\");\nconst binary_1 = require(\"./binary\");\nconst is_binary_1 = require(\"./is-binary\");\n\n\n/**\n * Protocol version.\n *\n * @public\n */\nexports.protocol = 5;\nvar PacketType;\n(function (PacketType) {\n PacketType[PacketType[\"CONNECT\"] = 0] = \"CONNECT\";\n PacketType[PacketType[\"DISCONNECT\"] = 1] = \"DISCONNECT\";\n PacketType[PacketType[\"EVENT\"] = 2] = \"EVENT\";\n PacketType[PacketType[\"ACK\"] = 3] = \"ACK\";\n PacketType[PacketType[\"CONNECT_ERROR\"] = 4] = \"CONNECT_ERROR\";\n PacketType[PacketType[\"BINARY_EVENT\"] = 5] = \"BINARY_EVENT\";\n PacketType[PacketType[\"BINARY_ACK\"] = 6] = \"BINARY_ACK\";\n})(PacketType = exports.PacketType || (exports.PacketType = {}));\n/**\n * A socket.io Encoder instance\n */\nclass Encoder {\n /**\n * Encode a packet as a single string if non-binary, or as a\n * buffer sequence, depending on packet type.\n *\n * @param {Object} obj - packet object\n */\n encode(obj) {\n\n\n if (obj.type === PacketType.EVENT || obj.type === PacketType.ACK) {\n if (is_binary_1.hasBinary(obj)) {\n obj.type =\n obj.type === PacketType.EVENT\n ? 
PacketType.BINARY_EVENT\n : PacketType.BINARY_ACK;\n return this.encodeAsBinary(obj);\n }\n }\n return [this.encodeAsString(obj)];\n }\n /**\n * Encode packet as string.\n */\n encodeAsString(obj) {\n // first is type\n let str = \"\" + obj.type;\n // attachments if we have them\n if (obj.type === PacketType.BINARY_EVENT ||\n obj.type === PacketType.BINARY_ACK) {\n str += obj.attachments + \"-\";\n }\n // if we have a namespace other than `/`\n // we append it followed by a comma `,`\n if (obj.nsp && \"/\" !== obj.nsp) {\n str += obj.nsp + \",\";\n }\n // immediately followed by the id\n if (null != obj.id) {\n str += obj.id;\n }\n // json data\n if (null != obj.data) {\n str += JSON.stringify(obj.data);\n }\n\n\n return str;\n }\n /**\n * Encode packet as 'buffer sequence' by removing blobs, and\n * deconstructing packet into object with placeholders and\n * a list of buffers.\n */\n encodeAsBinary(obj) {\n const deconstruction = binary_1.deconstructPacket(obj);\n const pack = this.encodeAsString(deconstruction.packet);\n const buffers = deconstruction.buffers;\n buffers.unshift(pack); // add packet info to beginning of data list\n return buffers; // write all the buffers\n }\n}\nexports.Encoder = Encoder;\n/**\n * A socket.io Decoder instance\n *\n * @return {Object} decoder\n */\nclass Decoder extends Emitter {\n constructor() {\n super();\n }\n /**\n * Decodes an encoded packet string into packet JSON.\n *\n * @param {String} obj - encoded packet\n */\n add(obj) {\n let packet;\n if (typeof obj === \"string\") {\n packet = this.decodeString(obj);\n if (packet.type === PacketType.BINARY_EVENT ||\n packet.type === PacketType.BINARY_ACK) {\n // binary packet's json\n this.reconstructor = new BinaryReconstructor(packet);\n // no attachments, labeled binary but no binary data to follow\n if (packet.attachments === 0) {\n super.emit(\"decoded\", packet);\n }\n }\n else {\n // non-binary full packet\n super.emit(\"decoded\", packet);\n }\n }\n else if (is_binary_1.isBinary(obj) || obj.base64) {\n // raw binary data\n if (!this.reconstructor) {\n throw new Error(\"got binary data when not reconstructing a packet\");\n }\n else {\n packet = this.reconstructor.takeBinaryData(obj);\n if (packet) {\n // received final buffer\n this.reconstructor = null;\n super.emit(\"decoded\", packet);\n }\n }\n }\n else {\n throw new Error(\"Unknown type: \" + obj);\n }\n }\n /**\n * Decode a packet String (JSON data)\n *\n * @param {String} str\n * @return {Object} packet\n */\n decodeString(str) {\n let i = 0;\n // look up type\n const p = {\n type: Number(str.charAt(0)),\n };\n if (PacketType[p.type] === undefined) {\n throw new Error(\"unknown packet type \" + p.type);\n }\n // look up attachments if type binary\n if (p.type === PacketType.BINARY_EVENT ||\n p.type === PacketType.BINARY_ACK) {\n const start = i + 1;\n while (str.charAt(++i) !== \"-\" && i != str.length) { }\n const buf = str.substring(start, i);\n if (buf != Number(buf) || str.charAt(i) !== \"-\") {\n throw new Error(\"Illegal attachments\");\n }\n p.attachments = Number(buf);\n }\n // look up namespace (if any)\n if (\"/\" === str.charAt(i + 1)) {\n const start = i + 1;\n while (++i) {\n const c = str.charAt(i);\n if (\",\" === c)\n break;\n if (i === str.length)\n break;\n }\n p.nsp = str.substring(start, i);\n }\n else {\n p.nsp = \"/\";\n }\n // look up id\n const next = str.charAt(i + 1);\n if (\"\" !== next && Number(next) == next) {\n const start = i + 1;\n while (++i) {\n const c = str.charAt(i);\n if (null == c || Number(c) != c) 
{\n --i;\n break;\n }\n if (i === str.length)\n break;\n }\n p.id = Number(str.substring(start, i + 1));\n }\n // look up json data\n if (str.charAt(++i)) {\n const payload = tryParse(str.substr(i));\n if (Decoder.isPayloadValid(p.type, payload)) {\n p.data = payload;\n }\n else {\n throw new Error(\"invalid payload\");\n }\n }\n\n\n return p;\n }\n static isPayloadValid(type, payload) {\n switch (type) {\n case PacketType.CONNECT:\n return typeof payload === \"object\";\n case PacketType.DISCONNECT:\n return payload === undefined;\n case PacketType.CONNECT_ERROR:\n return typeof payload === \"string\" || typeof payload === \"object\";\n case PacketType.EVENT:\n case PacketType.BINARY_EVENT:\n return Array.isArray(payload) && typeof payload[0] === \"string\";\n case PacketType.ACK:\n case PacketType.BINARY_ACK:\n return Array.isArray(payload);\n }\n }\n /**\n * Deallocates a parser's resources\n */\n destroy() {\n if (this.reconstructor) {\n this.reconstructor.finishedReconstruction();\n }\n }\n}\nexports.Decoder = Decoder;\nfunction tryParse(str) {\n try {\n return JSON.parse(str);\n }\n catch (e) {\n return false;\n }\n}\n/**\n * A manager of a binary event's 'buffer sequence'. Should\n * be constructed whenever a packet of type BINARY_EVENT is\n * decoded.\n *\n * @param {Object} packet\n * @return {BinaryReconstructor} initialized reconstructor\n */\nclass BinaryReconstructor {\n constructor(packet) {\n this.packet = packet;\n this.buffers = [];\n this.reconPack = packet;\n }\n /**\n * Method to be called when binary data received from connection\n * after a BINARY_EVENT packet.\n *\n * @param {Buffer | ArrayBuffer} binData - the raw binary data received\n * @return {null | Object} returns null if more binary data is expected or\n * a reconstructed packet object if all buffers have been received.\n */\n takeBinaryData(binData) {\n this.buffers.push(binData);\n if (this.buffers.length === this.reconPack.attachments) {\n // done with buffer list\n const packet = binary_1.reconstructPacket(this.reconPack, this.buffers);\n this.finishedReconstruction();\n return packet;\n }\n return null;\n }\n /**\n * Cleans up binary packet reconstruction variables.\n */\n finishedReconstruction() {\n this.reconPack = null;\n this.buffers = [];\n }\n}\n","/**\n * Parses an URI\n *\n * @author Steven Levithan (MIT license)\n * @api private\n */\n\nvar re = /^(?:(?![^:@]+:[^:@\\/]*@)(http|https|ws|wss):\\/\\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?((?:[a-f0-9]{0,4}:){2,7}[a-f0-9]{0,4}|[^:\\/?#]*)(?::(\\d*))?)(((\\/(?:[^?#](?![^?#\\/]*\\.[^?#\\/.]+(?:[?#]|$)))*\\/?)?([^?#\\/]*))(?:\\?([^#]*))?(?:#(.*))?)/;\n\nvar parts = [\n 'source', 'protocol', 'authority', 'userInfo', 'user', 'password', 'host', 'port', 'relative', 'path', 'directory', 'file', 'query', 'anchor'\n];\n\nmodule.exports = function parseuri(str) {\n var src = str,\n b = str.indexOf('['),\n e = str.indexOf(']');\n\n if (b != -1 && e != -1) {\n str = str.substring(0, b) + str.substring(b, e).replace(/:/g, ';') + str.substring(e, str.length);\n }\n\n var m = re.exec(str || ''),\n uri = {},\n i = 14;\n\n while (i--) {\n uri[parts[i]] = m[i] || '';\n }\n\n if (b != -1 && e != -1) {\n uri.source = src;\n uri.host = uri.host.substring(1, uri.host.length - 1).replace(/;/g, ':');\n uri.authority = uri.authority.replace('[', '').replace(']', '').replace(/;/g, ':');\n uri.ipv6uri = true;\n }\n\n uri.pathNames = pathNames(uri, uri['path']);\n uri.queryKey = queryKey(uri, uri['query']);\n\n return uri;\n};\n\nfunction pathNames(obj, path) {\n var regx = 
/\\/{2,9}/g,\n names = path.replace(regx, \"/\").split(\"/\");\n\n if (path.substr(0, 1) == '/' || path.length === 0) {\n names.splice(0, 1);\n }\n if (path.substr(path.length - 1, 1) == '/') {\n names.splice(names.length - 1, 1);\n }\n\n return names;\n}\n\nfunction queryKey(uri, query) {\n var data = {};\n\n query.replace(/(?:^|&)([^&=]*)=?([^&]*)/g, function ($0, $1, $2) {\n if ($1) {\n data[$1] = $2;\n }\n });\n\n return data;\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Manager = void 0;\nconst eio = require(\"engine.io-client\");\nconst socket_1 = require(\"./socket\");\nconst Emitter = require(\"component-emitter\");\nconst parser = require(\"socket.io-parser\");\nconst on_1 = require(\"./on\");\nconst bind = require(\"component-bind\");\nconst Backoff = require(\"backo2\");\n\n\nclass Manager extends Emitter {\n constructor(uri, opts) {\n super();\n this.nsps = {};\n this.subs = [];\n if (uri && \"object\" === typeof uri) {\n opts = uri;\n uri = undefined;\n }\n opts = opts || {};\n opts.path = opts.path || \"/socket.io\";\n this.opts = opts;\n this.reconnection(opts.reconnection !== false);\n this.reconnectionAttempts(opts.reconnectionAttempts || Infinity);\n this.reconnectionDelay(opts.reconnectionDelay || 1000);\n this.reconnectionDelayMax(opts.reconnectionDelayMax || 5000);\n this.randomizationFactor(opts.randomizationFactor || 0.5);\n this.backoff = new Backoff({\n min: this.reconnectionDelay(),\n max: this.reconnectionDelayMax(),\n jitter: this.randomizationFactor(),\n });\n this.timeout(null == opts.timeout ? 20000 : opts.timeout);\n this._readyState = \"closed\";\n this.uri = uri;\n const _parser = opts.parser || parser;\n this.encoder = new _parser.Encoder();\n this.decoder = new _parser.Decoder();\n this._autoConnect = opts.autoConnect !== false;\n if (this._autoConnect)\n this.open();\n }\n reconnection(v) {\n if (!arguments.length)\n return this._reconnection;\n this._reconnection = !!v;\n return this;\n }\n reconnectionAttempts(v) {\n if (v === undefined)\n return this._reconnectionAttempts;\n this._reconnectionAttempts = v;\n return this;\n }\n reconnectionDelay(v) {\n var _a;\n if (v === undefined)\n return this._reconnectionDelay;\n this._reconnectionDelay = v;\n (_a = this.backoff) === null || _a === void 0 ? void 0 : _a.setMin(v);\n return this;\n }\n randomizationFactor(v) {\n var _a;\n if (v === undefined)\n return this._randomizationFactor;\n this._randomizationFactor = v;\n (_a = this.backoff) === null || _a === void 0 ? void 0 : _a.setJitter(v);\n return this;\n }\n reconnectionDelayMax(v) {\n var _a;\n if (v === undefined)\n return this._reconnectionDelayMax;\n this._reconnectionDelayMax = v;\n (_a = this.backoff) === null || _a === void 0 ? 
void 0 : _a.setMax(v);\n return this;\n }\n timeout(v) {\n if (!arguments.length)\n return this._timeout;\n this._timeout = v;\n return this;\n }\n /**\n * Starts trying to reconnect if reconnection is enabled and we have not\n * started reconnecting yet\n *\n * @private\n */\n maybeReconnectOnOpen() {\n // Only try to reconnect if it's the first time we're connecting\n if (!this._reconnecting &&\n this._reconnection &&\n this.backoff.attempts === 0) {\n // keeps reconnection from firing twice for the same reconnection loop\n this.reconnect();\n }\n }\n /**\n * Sets the current transport `socket`.\n *\n * @param {Function} fn - optional, callback\n * @return self\n * @public\n */\n open(fn) {\n\n\n if (~this._readyState.indexOf(\"open\"))\n return this;\n\n\n this.engine = eio(this.uri, this.opts);\n const socket = this.engine;\n const self = this;\n this._readyState = \"opening\";\n this.skipReconnect = false;\n // emit `open`\n const openSub = on_1.on(socket, \"open\", function () {\n self.onopen();\n fn && fn();\n });\n // emit `error`\n const errorSub = on_1.on(socket, \"error\", (err) => {\n\n\n self.cleanup();\n self._readyState = \"closed\";\n super.emit(\"error\", err);\n if (fn) {\n fn(err);\n }\n else {\n // Only do this if there is no fn to handle the error\n self.maybeReconnectOnOpen();\n }\n });\n if (false !== this._timeout) {\n const timeout = this._timeout;\n\n\n if (timeout === 0) {\n openSub.destroy(); // prevents a race condition with the 'open' event\n }\n // set timer\n const timer = setTimeout(() => {\n\n\n openSub.destroy();\n socket.close();\n socket.emit(\"error\", new Error(\"timeout\"));\n }, timeout);\n this.subs.push({\n destroy: function () {\n clearTimeout(timer);\n },\n });\n }\n this.subs.push(openSub);\n this.subs.push(errorSub);\n return this;\n }\n /**\n * Alias for open()\n *\n * @return {Manager} self\n * @public\n */\n connect(fn) {\n return this.open(fn);\n }\n /**\n * Called upon transport open.\n *\n * @private\n */\n onopen() {\n\n\n // clear old subs\n this.cleanup();\n // mark as open\n this._readyState = \"open\";\n super.emit(\"open\");\n // add new subs\n const socket = this.engine;\n this.subs.push(on_1.on(socket, \"data\", bind(this, \"ondata\")), on_1.on(socket, \"ping\", bind(this, \"onping\")), on_1.on(socket, \"error\", bind(this, \"onerror\")), on_1.on(socket, \"close\", bind(this, \"onclose\")), on_1.on(this.decoder, \"decoded\", bind(this, \"ondecoded\")));\n }\n /**\n * Called upon a ping.\n *\n * @private\n */\n onping() {\n super.emit(\"ping\");\n }\n /**\n * Called with data.\n *\n * @private\n */\n ondata(data) {\n this.decoder.add(data);\n }\n /**\n * Called when parser fully decodes a packet.\n *\n * @private\n */\n ondecoded(packet) {\n super.emit(\"packet\", packet);\n }\n /**\n * Called upon socket error.\n *\n * @private\n */\n onerror(err) {\n\n\n super.emit(\"error\", err);\n }\n /**\n * Creates a new socket for the given `nsp`.\n *\n * @return {Socket}\n * @public\n */\n socket(nsp, opts) {\n let socket = this.nsps[nsp];\n if (!socket) {\n socket = new socket_1.Socket(this, nsp, opts);\n this.nsps[nsp] = socket;\n }\n return socket;\n }\n /**\n * Called upon a socket close.\n *\n * @param socket\n * @private\n */\n _destroy(socket) {\n const nsps = Object.keys(this.nsps);\n for (const nsp of nsps) {\n const socket = this.nsps[nsp];\n if (socket.active) {\n\n\n return;\n }\n }\n this._close();\n }\n /**\n * Writes a packet.\n *\n * @param packet\n * @private\n */\n _packet(packet) {\n\n\n if (packet.query && packet.type 
=== 0)\n packet.nsp += \"?\" + packet.query;\n const encodedPackets = this.encoder.encode(packet);\n for (let i = 0; i < encodedPackets.length; i++) {\n this.engine.write(encodedPackets[i], packet.options);\n }\n }\n /**\n * Clean up transport subscriptions and packet buffer.\n *\n * @private\n */\n cleanup() {\n\n\n const subsLength = this.subs.length;\n for (let i = 0; i < subsLength; i++) {\n const sub = this.subs.shift();\n sub.destroy();\n }\n this.decoder.destroy();\n }\n /**\n * Close the current socket.\n *\n * @private\n */\n _close() {\n\n\n this.skipReconnect = true;\n this._reconnecting = false;\n if (\"opening\" === this._readyState) {\n // `onclose` will not fire because\n // an open event never happened\n this.cleanup();\n }\n this.backoff.reset();\n this._readyState = \"closed\";\n if (this.engine)\n this.engine.close();\n }\n /**\n * Alias for close()\n *\n * @private\n */\n disconnect() {\n return this._close();\n }\n /**\n * Called upon engine close.\n *\n * @private\n */\n onclose(reason) {\n\n\n this.cleanup();\n this.backoff.reset();\n this._readyState = \"closed\";\n super.emit(\"close\", reason);\n if (this._reconnection && !this.skipReconnect) {\n this.reconnect();\n }\n }\n /**\n * Attempt a reconnection.\n *\n * @private\n */\n reconnect() {\n if (this._reconnecting || this.skipReconnect)\n return this;\n const self = this;\n if (this.backoff.attempts >= this._reconnectionAttempts) {\n\n\n this.backoff.reset();\n super.emit(\"reconnect_failed\");\n this._reconnecting = false;\n }\n else {\n const delay = this.backoff.duration();\n\n\n this._reconnecting = true;\n const timer = setTimeout(() => {\n if (self.skipReconnect)\n return;\n\n\n super.emit(\"reconnect_attempt\", self.backoff.attempts);\n // check again for the case socket closed in above events\n if (self.skipReconnect)\n return;\n self.open((err) => {\n if (err) {\n\n\n self._reconnecting = false;\n self.reconnect();\n super.emit(\"reconnect_error\", err);\n }\n else {\n\n\n self.onreconnect();\n }\n });\n }, delay);\n this.subs.push({\n destroy: function () {\n clearTimeout(timer);\n },\n });\n }\n }\n /**\n * Called upon successful reconnect.\n *\n * @private\n */\n onreconnect() {\n const attempt = this.backoff.attempts;\n this._reconnecting = false;\n this.backoff.reset();\n super.emit(\"reconnect\", attempt);\n }\n}\nexports.Manager = Manager;\n","const XMLHttpRequest = require(\"xmlhttprequest-ssl\");\nconst XHR = require(\"./polling-xhr\");\nconst JSONP = require(\"./polling-jsonp\");\nconst websocket = require(\"./websocket\");\n\nexports.polling = polling;\nexports.websocket = websocket;\n\n/**\n * Polling transport polymorphic constructor.\n * Decides on xhr vs jsonp based on feature detection.\n *\n * @api private\n */\n\nfunction polling(opts) {\n let xhr;\n let xd = false;\n let xs = false;\n const jsonp = false !== opts.jsonp;\n\n if (typeof location !== \"undefined\") {\n const isSSL = \"https:\" === location.protocol;\n let port = location.port;\n\n // some user agents have empty `location.port`\n if (!port) {\n port = isSSL ? 
443 : 80;\n }\n\n xd = opts.hostname !== location.hostname || port !== opts.port;\n xs = opts.secure !== isSSL;\n }\n\n opts.xdomain = xd;\n opts.xscheme = xs;\n xhr = new XMLHttpRequest(opts);\n\n if (\"open\" in xhr && !opts.forceJSONP) {\n return new XHR(opts);\n } else {\n if (!jsonp) throw new Error(\"JSONP disabled\");\n return new JSONP(opts);\n }\n}\n","// browser shim for xmlhttprequest module\n\nconst hasCORS = require(\"has-cors\");\nconst globalThis = require(\"./globalThis\");\n\nmodule.exports = function(opts) {\n const xdomain = opts.xdomain;\n\n // scheme must be same when usign XDomainRequest\n // http://blogs.msdn.com/b/ieinternals/archive/2010/05/13/xdomainrequest-restrictions-limitations-and-workarounds.aspx\n const xscheme = opts.xscheme;\n\n // XDomainRequest has a flow of not sending cookie, therefore it should be disabled as a default.\n // https://github.com/Automattic/engine.io-client/pull/217\n const enablesXDR = opts.enablesXDR;\n\n // XMLHttpRequest can be disabled on IE\n try {\n if (\"undefined\" !== typeof XMLHttpRequest && (!xdomain || hasCORS)) {\n return new XMLHttpRequest();\n }\n } catch (e) {}\n\n // Use XDomainRequest for IE8 if enablesXDR is true\n // because loading bar keeps flashing when using jsonp-polling\n // https://github.com/yujiosaka/socke.io-ie8-loading-example\n try {\n if (\"undefined\" !== typeof XDomainRequest && !xscheme && enablesXDR) {\n return new XDomainRequest();\n }\n } catch (e) {}\n\n if (!xdomain) {\n try {\n return new globalThis[[\"Active\"].concat(\"Object\").join(\"X\")](\n \"Microsoft.XMLHTTP\"\n );\n } catch (e) {}\n }\n};\n","const Transport = require(\"../transport\");\nconst parseqs = require(\"parseqs\");\nconst parser = require(\"engine.io-parser\");\nconst yeast = require(\"yeast\");\n\n\n\n\nclass Polling extends Transport {\n /**\n * Transport name.\n */\n get name() {\n return \"polling\";\n }\n\n /**\n * Opens the socket (triggers polling). 
We write a PING message to determine\n * when the transport is open.\n *\n * @api private\n */\n doOpen() {\n this.poll();\n }\n\n /**\n * Pauses polling.\n *\n * @param {Function} callback upon buffers are flushed and transport is paused\n * @api private\n */\n pause(onPause) {\n const self = this;\n\n this.readyState = \"pausing\";\n\n function pause() {\n\n\n self.readyState = \"paused\";\n onPause();\n }\n\n if (this.polling || !this.writable) {\n let total = 0;\n\n if (this.polling) {\n\n\n total++;\n this.once(\"pollComplete\", function() {\n\n\n --total || pause();\n });\n }\n\n if (!this.writable) {\n\n\n total++;\n this.once(\"drain\", function() {\n\n\n --total || pause();\n });\n }\n } else {\n pause();\n }\n }\n\n /**\n * Starts polling cycle.\n *\n * @api public\n */\n poll() {\n\n\n this.polling = true;\n this.doPoll();\n this.emit(\"poll\");\n }\n\n /**\n * Overloads onData to detect payloads.\n *\n * @api private\n */\n onData(data) {\n const self = this;\n\n\n const callback = function(packet, index, total) {\n // if its the first message we consider the transport open\n if (\"opening\" === self.readyState && packet.type === \"open\") {\n self.onOpen();\n }\n\n // if its a close packet, we close the ongoing requests\n if (\"close\" === packet.type) {\n self.onClose();\n return false;\n }\n\n // otherwise bypass onData and handle the message\n self.onPacket(packet);\n };\n\n // decode payload\n parser.decodePayload(data, this.socket.binaryType).forEach(callback);\n\n // if an event did not trigger closing\n if (\"closed\" !== this.readyState) {\n // if we got data we're not polling\n this.polling = false;\n this.emit(\"pollComplete\");\n\n if (\"open\" === this.readyState) {\n this.poll();\n } else {\n\n\n }\n }\n }\n\n /**\n * For polling, send a close packet.\n *\n * @api private\n */\n doClose() {\n const self = this;\n\n function close() {\n\n\n self.write([{ type: \"close\" }]);\n }\n\n if (\"open\" === this.readyState) {\n\n\n close();\n } else {\n // in case we're trying to close while\n // handshaking is in progress (GH-164)\n\n\n this.once(\"open\", close);\n }\n }\n\n /**\n * Writes a packets payload.\n *\n * @param {Array} data packets\n * @param {Function} drain callback\n * @api private\n */\n write(packets) {\n this.writable = false;\n\n parser.encodePayload(packets, data => {\n this.doWrite(data, () => {\n this.writable = true;\n this.emit(\"drain\");\n });\n });\n }\n\n /**\n * Generates uri for connection.\n *\n * @api private\n */\n uri() {\n let query = this.query || {};\n const schema = this.opts.secure ? \"https\" : \"http\";\n let port = \"\";\n\n // cache busting is forced\n if (false !== this.opts.timestampRequests) {\n query[this.opts.timestampParam] = yeast();\n }\n\n if (!this.supportsBinary && !query.sid) {\n query.b64 = 1;\n }\n\n query = parseqs.encode(query);\n\n // avoid port if default for schema\n if (\n this.opts.port &&\n ((\"https\" === schema && Number(this.opts.port) !== 443) ||\n (\"http\" === schema && Number(this.opts.port) !== 80))\n ) {\n port = \":\" + this.opts.port;\n }\n\n // prepend ? to query\n if (query.length) {\n query = \"?\" + query;\n }\n\n const ipv6 = this.opts.hostname.indexOf(\":\") !== -1;\n return (\n schema +\n \"://\" +\n (ipv6 ? 
\"[\" + this.opts.hostname + \"]\" : this.opts.hostname) +\n port +\n this.opts.path +\n query\n );\n }\n}\n\nmodule.exports = Polling;\n","const PACKET_TYPES = Object.create(null); // no Map = no polyfill\nPACKET_TYPES[\"open\"] = \"0\";\nPACKET_TYPES[\"close\"] = \"1\";\nPACKET_TYPES[\"ping\"] = \"2\";\nPACKET_TYPES[\"pong\"] = \"3\";\nPACKET_TYPES[\"message\"] = \"4\";\nPACKET_TYPES[\"upgrade\"] = \"5\";\nPACKET_TYPES[\"noop\"] = \"6\";\n\nconst PACKET_TYPES_REVERSE = Object.create(null);\nObject.keys(PACKET_TYPES).forEach(key => {\n PACKET_TYPES_REVERSE[PACKET_TYPES[key]] = key;\n});\n\nconst ERROR_PACKET = { type: \"error\", data: \"parser error\" };\n\nmodule.exports = {\n PACKET_TYPES,\n PACKET_TYPES_REVERSE,\n ERROR_PACKET\n};\n","'use strict';\n\nvar alphabet = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_'.split('')\n , length = 64\n , map = {}\n , seed = 0\n , i = 0\n , prev;\n\n/**\n * Return a string representing the specified number.\n *\n * @param {Number} num The number to convert.\n * @returns {String} The string representation of the number.\n * @api public\n */\nfunction encode(num) {\n var encoded = '';\n\n do {\n encoded = alphabet[num % length] + encoded;\n num = Math.floor(num / length);\n } while (num > 0);\n\n return encoded;\n}\n\n/**\n * Return the integer value specified by the given string.\n *\n * @param {String} str The string to convert.\n * @returns {Number} The integer value represented by the string.\n * @api public\n */\nfunction decode(str) {\n var decoded = 0;\n\n for (i = 0; i < str.length; i++) {\n decoded = decoded * length + map[str.charAt(i)];\n }\n\n return decoded;\n}\n\n/**\n * Yeast: A tiny growing id generator.\n *\n * @returns {String} A unique id.\n * @api public\n */\nfunction yeast() {\n var now = encode(+new Date());\n\n if (now !== prev) return seed = 0, prev = now;\n return now +'.'+ encode(seed++);\n}\n\n//\n// Map each character to its index.\n//\nfor (; i < length; i++) map[alphabet[i]] = i;\n\n//\n// Expose the `yeast`, `encode` and `decode` functions.\n//\nyeast.encode = encode;\nyeast.decode = decode;\nmodule.exports = yeast;\n","module.exports.pick = (obj, ...attr) => {\n return attr.reduce((acc, k) => {\n acc[k] = obj[k];\n return acc;\n }, {});\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Socket = void 0;\nconst socket_io_parser_1 = require(\"socket.io-parser\");\nconst Emitter = require(\"component-emitter\");\nconst on_1 = require(\"./on\");\nconst bind = require(\"component-bind\");\n\n\n/**\n * Internal events.\n * These events can't be emitted by the user.\n */\nconst RESERVED_EVENTS = Object.freeze({\n connect: 1,\n connect_error: 1,\n disconnect: 1,\n disconnecting: 1,\n // EventEmitter reserved events: https://nodejs.org/api/events.html#events_event_newlistener\n newListener: 1,\n removeListener: 1,\n});\nclass Socket extends Emitter {\n /**\n * `Socket` constructor.\n *\n * @public\n */\n constructor(io, nsp, opts) {\n super();\n this.ids = 0;\n this.acks = {};\n this.receiveBuffer = [];\n this.sendBuffer = [];\n this.flags = {};\n this.io = io;\n this.nsp = nsp;\n this.ids = 0;\n this.acks = {};\n this.receiveBuffer = [];\n this.sendBuffer = [];\n this.connected = false;\n this.disconnected = true;\n this.flags = {};\n if (opts && opts.auth) {\n this.auth = opts.auth;\n }\n if (this.io._autoConnect)\n this.open();\n }\n /**\n * Subscribe to open, close and packet events\n *\n * @private\n */\n subEvents() {\n if (this.subs)\n return;\n const io 
= this.io;\n this.subs = [\n on_1.on(io, \"open\", bind(this, \"onopen\")),\n on_1.on(io, \"packet\", bind(this, \"onpacket\")),\n on_1.on(io, \"close\", bind(this, \"onclose\")),\n ];\n }\n /**\n * Whether the Socket will try to reconnect when its Manager connects or reconnects\n */\n get active() {\n return !!this.subs;\n }\n /**\n * \"Opens\" the socket.\n *\n * @public\n */\n connect() {\n if (this.connected)\n return this;\n this.subEvents();\n if (!this.io[\"_reconnecting\"])\n this.io.open(); // ensure open\n if (\"open\" === this.io._readyState)\n this.onopen();\n return this;\n }\n /**\n * Alias for connect()\n */\n open() {\n return this.connect();\n }\n /**\n * Sends a `message` event.\n *\n * @return self\n * @public\n */\n send(...args) {\n args.unshift(\"message\");\n this.emit.apply(this, args);\n return this;\n }\n /**\n * Override `emit`.\n * If the event is in `events`, it's emitted normally.\n *\n * @param ev - event name\n * @return self\n * @public\n */\n emit(ev, ...args) {\n if (RESERVED_EVENTS.hasOwnProperty(ev)) {\n throw new Error('\"' + ev + '\" is a reserved event name');\n }\n args.unshift(ev);\n const packet = {\n type: socket_io_parser_1.PacketType.EVENT,\n data: args,\n };\n packet.options = {};\n packet.options.compress = this.flags.compress !== false;\n // event ack callback\n if (\"function\" === typeof args[args.length - 1]) {\n\n\n this.acks[this.ids] = args.pop();\n packet.id = this.ids++;\n }\n const isTransportWritable = this.io.engine &&\n this.io.engine.transport &&\n this.io.engine.transport.writable;\n const discardPacket = this.flags.volatile && (!isTransportWritable || !this.connected);\n if (discardPacket) {\n\n\n }\n else if (this.connected) {\n this.packet(packet);\n }\n else {\n this.sendBuffer.push(packet);\n }\n this.flags = {};\n return this;\n }\n /**\n * Sends a packet.\n *\n * @param packet\n * @private\n */\n packet(packet) {\n packet.nsp = this.nsp;\n this.io._packet(packet);\n }\n /**\n * Called upon engine `open`.\n *\n * @private\n */\n onopen() {\n\n\n if (typeof this.auth == \"function\") {\n this.auth((data) => {\n this.packet({ type: socket_io_parser_1.PacketType.CONNECT, data });\n });\n }\n else {\n this.packet({ type: socket_io_parser_1.PacketType.CONNECT, data: this.auth });\n }\n }\n /**\n * Called upon engine `close`.\n *\n * @param reason\n * @private\n */\n onclose(reason) {\n\n\n this.connected = false;\n this.disconnected = true;\n delete this.id;\n super.emit(\"disconnect\", reason);\n }\n /**\n * Called with socket packet.\n *\n * @param packet\n * @private\n */\n onpacket(packet) {\n const sameNamespace = packet.nsp === this.nsp;\n if (!sameNamespace)\n return;\n switch (packet.type) {\n case socket_io_parser_1.PacketType.CONNECT:\n if (packet.data && packet.data.sid) {\n const id = packet.data.sid;\n this.onconnect(id);\n }\n else {\n super.emit(\"connect_error\", new Error(\"It seems you are trying to reach a Socket.IO server in v2.x with a v3.x client, but they are not compatible (more information here: https://socket.io/docs/v3/migrating-from-2-x-to-3-0/)\"));\n }\n break;\n case socket_io_parser_1.PacketType.EVENT:\n this.onevent(packet);\n break;\n case socket_io_parser_1.PacketType.BINARY_EVENT:\n this.onevent(packet);\n break;\n case socket_io_parser_1.PacketType.ACK:\n this.onack(packet);\n break;\n case socket_io_parser_1.PacketType.BINARY_ACK:\n this.onack(packet);\n break;\n case socket_io_parser_1.PacketType.DISCONNECT:\n this.ondisconnect();\n break;\n case 
socket_io_parser_1.PacketType.CONNECT_ERROR:\n const err = new Error(packet.data.message);\n // @ts-ignore\n err.data = packet.data.data;\n super.emit(\"connect_error\", err);\n break;\n }\n }\n /**\n * Called upon a server event.\n *\n * @param packet\n * @private\n */\n onevent(packet) {\n const args = packet.data || [];\n\n\n if (null != packet.id) {\n\n\n args.push(this.ack(packet.id));\n }\n if (this.connected) {\n this.emitEvent(args);\n }\n else {\n this.receiveBuffer.push(Object.freeze(args));\n }\n }\n emitEvent(args) {\n if (this._anyListeners && this._anyListeners.length) {\n const listeners = this._anyListeners.slice();\n for (const listener of listeners) {\n listener.apply(this, args);\n }\n }\n super.emit.apply(this, args);\n }\n /**\n * Produces an ack callback to emit with an event.\n *\n * @private\n */\n ack(id) {\n const self = this;\n let sent = false;\n return function (...args) {\n // prevent double callbacks\n if (sent)\n return;\n sent = true;\n\n\n self.packet({\n type: socket_io_parser_1.PacketType.ACK,\n id: id,\n data: args,\n });\n };\n }\n /**\n * Called upon a server acknowlegement.\n *\n * @param packet\n * @private\n */\n onack(packet) {\n const ack = this.acks[packet.id];\n if (\"function\" === typeof ack) {\n\n\n ack.apply(this, packet.data);\n delete this.acks[packet.id];\n }\n else {\n\n\n }\n }\n /**\n * Called upon server connect.\n *\n * @private\n */\n onconnect(id) {\n\n\n this.id = id;\n this.connected = true;\n this.disconnected = false;\n super.emit(\"connect\");\n this.emitBuffered();\n }\n /**\n * Emit buffered events (received and emitted).\n *\n * @private\n */\n emitBuffered() {\n this.receiveBuffer.forEach((args) => this.emitEvent(args));\n this.receiveBuffer = [];\n this.sendBuffer.forEach((packet) => this.packet(packet));\n this.sendBuffer = [];\n }\n /**\n * Called upon server disconnect.\n *\n * @private\n */\n ondisconnect() {\n\n\n this.destroy();\n this.onclose(\"io server disconnect\");\n }\n /**\n * Called upon forced client/server side disconnections,\n * this method ensures the manager stops tracking us and\n * that reconnections don't get triggered for this.\n *\n * @private\n */\n destroy() {\n if (this.subs) {\n // clean subscriptions to avoid reconnections\n for (let i = 0; i < this.subs.length; i++) {\n this.subs[i].destroy();\n }\n this.subs = null;\n }\n this.io[\"_destroy\"](this);\n }\n /**\n * Disconnects the socket manually.\n *\n * @return self\n * @public\n */\n disconnect() {\n if (this.connected) {\n\n\n this.packet({ type: socket_io_parser_1.PacketType.DISCONNECT });\n }\n // remove socket from pool\n this.destroy();\n if (this.connected) {\n // fire events\n this.onclose(\"io client disconnect\");\n }\n return this;\n }\n /**\n * Alias for disconnect()\n *\n * @return self\n * @public\n */\n close() {\n return this.disconnect();\n }\n /**\n * Sets the compress flag.\n *\n * @param compress - if `true`, compresses the sending data\n * @return self\n * @public\n */\n compress(compress) {\n this.flags.compress = compress;\n return this;\n }\n /**\n * Sets a modifier for a subsequent event emission that the event message will be dropped when this socket is not\n * ready to send messages.\n *\n * @returns self\n * @public\n */\n get volatile() {\n this.flags.volatile = true;\n return this;\n }\n /**\n * Adds a listener that will be fired when any event is emitted. 
The event name is passed as the first argument to the\n * callback.\n *\n * @param listener\n * @public\n */\n onAny(listener) {\n this._anyListeners = this._anyListeners || [];\n this._anyListeners.push(listener);\n return this;\n }\n /**\n * Adds a listener that will be fired when any event is emitted. The event name is passed as the first argument to the\n * callback. The listener is added to the beginning of the listeners array.\n *\n * @param listener\n * @public\n */\n prependAny(listener) {\n this._anyListeners = this._anyListeners || [];\n this._anyListeners.unshift(listener);\n return this;\n }\n /**\n * Removes the listener that will be fired when any event is emitted.\n *\n * @param listener\n * @public\n */\n offAny(listener) {\n if (!this._anyListeners) {\n return this;\n }\n if (listener) {\n const listeners = this._anyListeners;\n for (let i = 0; i < listeners.length; i++) {\n if (listener === listeners[i]) {\n listeners.splice(i, 1);\n return this;\n }\n }\n }\n else {\n this._anyListeners = [];\n }\n return this;\n }\n /**\n * Returns an array of listeners that are listening for any event that is specified. This array can be manipulated,\n * e.g. to remove listeners.\n *\n * @public\n */\n listenersAny() {\n return this._anyListeners || [];\n }\n}\nexports.Socket = Socket;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.hasBinary = exports.isBinary = void 0;\nconst withNativeArrayBuffer = typeof ArrayBuffer === \"function\";\nconst isView = (obj) => {\n return typeof ArrayBuffer.isView === \"function\"\n ? ArrayBuffer.isView(obj)\n : obj.buffer instanceof ArrayBuffer;\n};\nconst toString = Object.prototype.toString;\nconst withNativeBlob = typeof Blob === \"function\" ||\n (typeof Blob !== \"undefined\" &&\n toString.call(Blob) === \"[object BlobConstructor]\");\nconst withNativeFile = typeof File === \"function\" ||\n (typeof File !== \"undefined\" &&\n toString.call(File) === \"[object FileConstructor]\");\n/**\n * Returns true if obj is a Buffer, an ArrayBuffer, a Blob or a File.\n *\n * @private\n */\nfunction isBinary(obj) {\n return ((withNativeArrayBuffer && (obj instanceof ArrayBuffer || isView(obj))) ||\n (withNativeBlob && obj instanceof Blob) ||\n (withNativeFile && obj instanceof File));\n}\nexports.isBinary = isBinary;\nfunction hasBinary(obj, toJSON) {\n if (!obj || typeof obj !== \"object\") {\n return false;\n }\n if (Array.isArray(obj)) {\n for (let i = 0, l = obj.length; i < l; i++) {\n if (hasBinary(obj[i])) {\n return true;\n }\n }\n return false;\n }\n if (isBinary(obj)) {\n return true;\n }\n if (obj.toJSON &&\n typeof obj.toJSON === \"function\" &&\n arguments.length === 1) {\n return hasBinary(obj.toJSON(), true);\n }\n for (const key in obj) {\n if (Object.prototype.hasOwnProperty.call(obj, key) && hasBinary(obj[key])) {\n return true;\n }\n }\n return false;\n}\nexports.hasBinary = hasBinary;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.on = void 0;\nfunction on(obj, ev, fn) {\n obj.on(ev, fn);\n return {\n destroy: function () {\n obj.off(ev, fn);\n },\n };\n}\nexports.on = on;\n","/**\n * Slice reference.\n */\n\nvar slice = [].slice;\n\n/**\n * Bind `obj` to `fn`.\n *\n * @param {Object} obj\n * @param {Function|String} fn or string\n * @return {Function}\n * @api public\n */\n\nmodule.exports = function(obj, fn){\n if ('string' == typeof fn) fn = obj[fn];\n if ('function' != typeof fn) throw new Error('bind() requires a function');\n var args = 
slice.call(arguments, 2);\n return function(){\n return fn.apply(obj, args.concat(slice.call(arguments)));\n }\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Socket = exports.io = exports.Manager = exports.protocol = void 0;\nconst url_1 = require(\"./url\");\nconst manager_1 = require(\"./manager\");\nconst socket_1 = require(\"./socket\");\nObject.defineProperty(exports, \"Socket\", { enumerable: true, get: function () { return socket_1.Socket; } });\n\n\n/**\n * Module exports.\n */\nmodule.exports = exports = lookup;\n/**\n * Managers cache.\n */\nconst cache = (exports.managers = {});\nfunction lookup(uri, opts) {\n if (typeof uri === \"object\") {\n opts = uri;\n uri = undefined;\n }\n opts = opts || {};\n const parsed = url_1.url(uri);\n const source = parsed.source;\n const id = parsed.id;\n const path = parsed.path;\n const sameNamespace = cache[id] && path in cache[id][\"nsps\"];\n const newConnection = opts.forceNew ||\n opts[\"force new connection\"] ||\n false === opts.multiplex ||\n sameNamespace;\n let io;\n if (newConnection) {\n\n\n io = new manager_1.Manager(source, opts);\n }\n else {\n if (!cache[id]) {\n\n\n cache[id] = new manager_1.Manager(source, opts);\n }\n io = cache[id];\n }\n if (parsed.query && !opts.query) {\n opts.query = parsed.query;\n }\n return io.socket(parsed.path, opts);\n}\nexports.io = lookup;\n/**\n * Protocol version.\n *\n * @public\n */\nvar socket_io_parser_1 = require(\"socket.io-parser\");\nObject.defineProperty(exports, \"protocol\", { enumerable: true, get: function () { return socket_io_parser_1.protocol; } });\n/**\n * `connect`.\n *\n * @param {String} uri\n * @public\n */\nexports.connect = lookup;\n/**\n * Expose constructors for standalone build.\n *\n * @public\n */\nvar manager_2 = require(\"./manager\");\nObject.defineProperty(exports, \"Manager\", { enumerable: true, get: function () { return manager_2.Manager; } });\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.url = void 0;\nconst parseuri = require(\"parseuri\");\n\n\n/**\n * URL parser.\n *\n * @param uri - url\n * @param loc - An object meant to mimic window.location.\n * Defaults to window.location.\n * @public\n */\nfunction url(uri, loc) {\n let obj = uri;\n // default to window.location\n loc = loc || (typeof location !== \"undefined\" && location);\n if (null == uri)\n uri = loc.protocol + \"//\" + loc.host;\n // relative path support\n if (typeof uri === \"string\") {\n if (\"/\" === uri.charAt(0)) {\n if (\"/\" === uri.charAt(1)) {\n uri = loc.protocol + uri;\n }\n else {\n uri = loc.host + uri;\n }\n }\n if (!/^(https?|wss?):\\/\\//.test(uri)) {\n\n\n if (\"undefined\" !== typeof loc) {\n uri = loc.protocol + \"//\" + uri;\n }\n else {\n uri = \"https://\" + uri;\n }\n }\n // parse\n\n\n obj = parseuri(uri);\n }\n // make sure we treat `localhost:80` and `localhost` equally\n if (!obj.port) {\n if (/^(http|ws)$/.test(obj.protocol)) {\n obj.port = \"80\";\n }\n else if (/^(http|ws)s$/.test(obj.protocol)) {\n obj.port = \"443\";\n }\n }\n obj.path = obj.path || \"/\";\n const ipv6 = obj.host.indexOf(\":\") !== -1;\n const host = ipv6 ? \"[\" + obj.host + \"]\" : obj.host;\n // define unique id\n obj.id = obj.protocol + \"://\" + host + \":\" + obj.port;\n // define href\n obj.href =\n obj.protocol +\n \"://\" +\n host +\n (loc && loc.port === obj.port ? 
\"\" : \":\" + obj.port);\n return obj;\n}\nexports.url = url;\n","const Socket = require(\"./socket\");\n\nmodule.exports = (uri, opts) => new Socket(uri, opts);\n\n/**\n * Expose deps for legacy compatibility\n * and standalone browser access.\n */\n\nmodule.exports.Socket = Socket;\nmodule.exports.protocol = Socket.protocol; // this is an int\nmodule.exports.Transport = require(\"./transport\");\nmodule.exports.transports = require(\"./transports/index\");\nmodule.exports.parser = require(\"engine.io-parser\");\n","const transports = require(\"./transports/index\");\nconst Emitter = require(\"component-emitter\");\n\n\nconst parser = require(\"engine.io-parser\");\nconst parseuri = require(\"parseuri\");\nconst parseqs = require(\"parseqs\");\n\nclass Socket extends Emitter {\n /**\n * Socket constructor.\n *\n * @param {String|Object} uri or options\n * @param {Object} options\n * @api public\n */\n constructor(uri, opts = {}) {\n super();\n\n if (uri && \"object\" === typeof uri) {\n opts = uri;\n uri = null;\n }\n\n if (uri) {\n uri = parseuri(uri);\n opts.hostname = uri.host;\n opts.secure = uri.protocol === \"https\" || uri.protocol === \"wss\";\n opts.port = uri.port;\n if (uri.query) opts.query = uri.query;\n } else if (opts.host) {\n opts.hostname = parseuri(opts.host).host;\n }\n\n this.secure =\n null != opts.secure\n ? opts.secure\n : typeof location !== \"undefined\" && \"https:\" === location.protocol;\n\n if (opts.hostname && !opts.port) {\n // if no port is specified manually, use the protocol default\n opts.port = this.secure ? \"443\" : \"80\";\n }\n\n this.hostname =\n opts.hostname ||\n (typeof location !== \"undefined\" ? location.hostname : \"localhost\");\n this.port =\n opts.port ||\n (typeof location !== \"undefined\" && location.port\n ? location.port\n : this.secure\n ? 
443\n : 80);\n\n this.transports = opts.transports || [\"polling\", \"websocket\"];\n this.readyState = \"\";\n this.writeBuffer = [];\n this.prevBufferLen = 0;\n\n this.opts = Object.assign(\n {\n path: \"/engine.io\",\n agent: false,\n withCredentials: false,\n upgrade: true,\n jsonp: true,\n timestampParam: \"t\",\n rememberUpgrade: false,\n rejectUnauthorized: true,\n perMessageDeflate: {\n threshold: 1024\n },\n transportOptions: {}\n },\n opts\n );\n\n this.opts.path = this.opts.path.replace(/\\/$/, \"\") + \"/\";\n\n if (typeof this.opts.query === \"string\") {\n this.opts.query = parseqs.decode(this.opts.query);\n }\n\n // set on handshake\n this.id = null;\n this.upgrades = null;\n this.pingInterval = null;\n this.pingTimeout = null;\n\n // set on heartbeat\n this.pingTimeoutTimer = null;\n\n this.open();\n }\n\n /**\n * Creates transport of the given type.\n *\n * @param {String} transport name\n * @return {Transport}\n * @api private\n */\n createTransport(name) {\n\n\n const query = clone(this.opts.query);\n\n // append engine.io protocol identifier\n query.EIO = parser.protocol;\n\n // transport name\n query.transport = name;\n\n // session id if we already have one\n if (this.id) query.sid = this.id;\n\n const opts = Object.assign(\n {},\n this.opts.transportOptions[name],\n this.opts,\n {\n query,\n socket: this,\n hostname: this.hostname,\n secure: this.secure,\n port: this.port\n }\n );\n\n\n\n\n return new transports[name](opts);\n }\n\n /**\n * Initializes transport to use and starts probe.\n *\n * @api private\n */\n open() {\n let transport;\n if (\n this.opts.rememberUpgrade &&\n Socket.priorWebsocketSuccess &&\n this.transports.indexOf(\"websocket\") !== -1\n ) {\n transport = \"websocket\";\n } else if (0 === this.transports.length) {\n // Emit error on next tick so it can be listened to\n const self = this;\n setTimeout(function() {\n self.emit(\"error\", \"No transports available\");\n }, 0);\n return;\n } else {\n transport = this.transports[0];\n }\n this.readyState = \"opening\";\n\n // Retry with the next transport if the transport is disabled (jsonp: false)\n try {\n transport = this.createTransport(transport);\n } catch (e) {\n\n\n this.transports.shift();\n this.open();\n return;\n }\n\n transport.open();\n this.setTransport(transport);\n }\n\n /**\n * Sets the current transport. 
Disables the existing one (if any).\n *\n * @api private\n */\n setTransport(transport) {\n\n\n const self = this;\n\n if (this.transport) {\n\n\n this.transport.removeAllListeners();\n }\n\n // set up transport\n this.transport = transport;\n\n // set up transport listeners\n transport\n .on(\"drain\", function() {\n self.onDrain();\n })\n .on(\"packet\", function(packet) {\n self.onPacket(packet);\n })\n .on(\"error\", function(e) {\n self.onError(e);\n })\n .on(\"close\", function() {\n self.onClose(\"transport close\");\n });\n }\n\n /**\n * Probes a transport.\n *\n * @param {String} transport name\n * @api private\n */\n probe(name) {\n\n\n let transport = this.createTransport(name, { probe: 1 });\n let failed = false;\n const self = this;\n\n Socket.priorWebsocketSuccess = false;\n\n function onTransportOpen() {\n if (self.onlyBinaryUpgrades) {\n const upgradeLosesBinary =\n !this.supportsBinary && self.transport.supportsBinary;\n failed = failed || upgradeLosesBinary;\n }\n if (failed) return;\n\n\n\n transport.send([{ type: \"ping\", data: \"probe\" }]);\n transport.once(\"packet\", function(msg) {\n if (failed) return;\n if (\"pong\" === msg.type && \"probe\" === msg.data) {\n\n\n self.upgrading = true;\n self.emit(\"upgrading\", transport);\n if (!transport) return;\n Socket.priorWebsocketSuccess = \"websocket\" === transport.name;\n\n\n\n self.transport.pause(function() {\n if (failed) return;\n if (\"closed\" === self.readyState) return;\n\n\n\n cleanup();\n\n self.setTransport(transport);\n transport.send([{ type: \"upgrade\" }]);\n self.emit(\"upgrade\", transport);\n transport = null;\n self.upgrading = false;\n self.flush();\n });\n } else {\n\n\n const err = new Error(\"probe error\");\n err.transport = transport.name;\n self.emit(\"upgradeError\", err);\n }\n });\n }\n\n function freezeTransport() {\n if (failed) return;\n\n // Any callback called by transport should be ignored since now\n failed = true;\n\n cleanup();\n\n transport.close();\n transport = null;\n }\n\n // Handle any error that happens while probing\n function onerror(err) {\n const error = new Error(\"probe error: \" + err);\n error.transport = transport.name;\n\n freezeTransport();\n\n\n\n\n self.emit(\"upgradeError\", error);\n }\n\n function onTransportClose() {\n onerror(\"transport closed\");\n }\n\n // When the socket is closed while we're probing\n function onclose() {\n onerror(\"socket closed\");\n }\n\n // When the socket is upgraded while we're probing\n function onupgrade(to) {\n if (transport && to.name !== transport.name) {\n\n\n freezeTransport();\n }\n }\n\n // Remove all listeners on the transport and on self\n function cleanup() {\n transport.removeListener(\"open\", onTransportOpen);\n transport.removeListener(\"error\", onerror);\n transport.removeListener(\"close\", onTransportClose);\n self.removeListener(\"close\", onclose);\n self.removeListener(\"upgrading\", onupgrade);\n }\n\n transport.once(\"open\", onTransportOpen);\n transport.once(\"error\", onerror);\n transport.once(\"close\", onTransportClose);\n\n this.once(\"close\", onclose);\n this.once(\"upgrading\", onupgrade);\n\n transport.open();\n }\n\n /**\n * Called when connection is deemed open.\n *\n * @api public\n */\n onOpen() {\n\n\n this.readyState = \"open\";\n Socket.priorWebsocketSuccess = \"websocket\" === this.transport.name;\n this.emit(\"open\");\n this.flush();\n\n // we check for `readyState` in case an `open`\n // listener already closed the socket\n if (\n \"open\" === this.readyState &&\n 
this.opts.upgrade &&\n this.transport.pause\n ) {\n\n\n let i = 0;\n const l = this.upgrades.length;\n for (; i < l; i++) {\n this.probe(this.upgrades[i]);\n }\n }\n }\n\n /**\n * Handles a packet.\n *\n * @api private\n */\n onPacket(packet) {\n if (\n \"opening\" === this.readyState ||\n \"open\" === this.readyState ||\n \"closing\" === this.readyState\n ) {\n\n\n\n this.emit(\"packet\", packet);\n\n // Socket is live - any packet counts\n this.emit(\"heartbeat\");\n\n switch (packet.type) {\n case \"open\":\n this.onHandshake(JSON.parse(packet.data));\n break;\n\n case \"ping\":\n this.resetPingTimeout();\n this.sendPacket(\"pong\");\n this.emit(\"pong\");\n break;\n\n case \"error\":\n const err = new Error(\"server error\");\n err.code = packet.data;\n this.onError(err);\n break;\n\n case \"message\":\n this.emit(\"data\", packet.data);\n this.emit(\"message\", packet.data);\n break;\n }\n } else {\n\n\n }\n }\n\n /**\n * Called upon handshake completion.\n *\n * @param {Object} handshake obj\n * @api private\n */\n onHandshake(data) {\n this.emit(\"handshake\", data);\n this.id = data.sid;\n this.transport.query.sid = data.sid;\n this.upgrades = this.filterUpgrades(data.upgrades);\n this.pingInterval = data.pingInterval;\n this.pingTimeout = data.pingTimeout;\n this.onOpen();\n // In case open handler closes socket\n if (\"closed\" === this.readyState) return;\n this.resetPingTimeout();\n }\n\n /**\n * Sets and resets ping timeout timer based on server pings.\n *\n * @api private\n */\n resetPingTimeout() {\n clearTimeout(this.pingTimeoutTimer);\n this.pingTimeoutTimer = setTimeout(() => {\n this.onClose(\"ping timeout\");\n }, this.pingInterval + this.pingTimeout);\n }\n\n /**\n * Called on `drain` event\n *\n * @api private\n */\n onDrain() {\n this.writeBuffer.splice(0, this.prevBufferLen);\n\n // setting prevBufferLen = 0 is very important\n // for example, when upgrading, upgrade packet is sent over,\n // and a nonzero prevBufferLen could cause problems on `drain`\n this.prevBufferLen = 0;\n\n if (0 === this.writeBuffer.length) {\n this.emit(\"drain\");\n } else {\n this.flush();\n }\n }\n\n /**\n * Flush write buffers.\n *\n * @api private\n */\n flush() {\n if (\n \"closed\" !== this.readyState &&\n this.transport.writable &&\n !this.upgrading &&\n this.writeBuffer.length\n ) {\n\n\n this.transport.send(this.writeBuffer);\n // keep track of current length of writeBuffer\n // splice writeBuffer and callbackBuffer on `drain`\n this.prevBufferLen = this.writeBuffer.length;\n this.emit(\"flush\");\n }\n }\n\n /**\n * Sends a message.\n *\n * @param {String} message.\n * @param {Function} callback function.\n * @param {Object} options.\n * @return {Socket} for chaining.\n * @api public\n */\n write(msg, options, fn) {\n this.sendPacket(\"message\", msg, options, fn);\n return this;\n }\n\n send(msg, options, fn) {\n this.sendPacket(\"message\", msg, options, fn);\n return this;\n }\n\n /**\n * Sends a packet.\n *\n * @param {String} packet type.\n * @param {String} data.\n * @param {Object} options.\n * @param {Function} callback function.\n * @api private\n */\n sendPacket(type, data, options, fn) {\n if (\"function\" === typeof data) {\n fn = data;\n data = undefined;\n }\n\n if (\"function\" === typeof options) {\n fn = options;\n options = null;\n }\n\n if (\"closing\" === this.readyState || \"closed\" === this.readyState) {\n return;\n }\n\n options = options || {};\n options.compress = false !== options.compress;\n\n const packet = {\n type: type,\n data: data,\n options: 
options\n };\n this.emit(\"packetCreate\", packet);\n this.writeBuffer.push(packet);\n if (fn) this.once(\"flush\", fn);\n this.flush();\n }\n\n /**\n * Closes the connection.\n *\n * @api private\n */\n close() {\n const self = this;\n\n if (\"opening\" === this.readyState || \"open\" === this.readyState) {\n this.readyState = \"closing\";\n\n if (this.writeBuffer.length) {\n this.once(\"drain\", function() {\n if (this.upgrading) {\n waitForUpgrade();\n } else {\n close();\n }\n });\n } else if (this.upgrading) {\n waitForUpgrade();\n } else {\n close();\n }\n }\n\n function close() {\n self.onClose(\"forced close\");\n\n\n self.transport.close();\n }\n\n function cleanupAndClose() {\n self.removeListener(\"upgrade\", cleanupAndClose);\n self.removeListener(\"upgradeError\", cleanupAndClose);\n close();\n }\n\n function waitForUpgrade() {\n // wait for upgrade to finish since we can't send packets while pausing a transport\n self.once(\"upgrade\", cleanupAndClose);\n self.once(\"upgradeError\", cleanupAndClose);\n }\n\n return this;\n }\n\n /**\n * Called upon transport error\n *\n * @api private\n */\n onError(err) {\n\n\n Socket.priorWebsocketSuccess = false;\n this.emit(\"error\", err);\n this.onClose(\"transport error\", err);\n }\n\n /**\n * Called upon transport close.\n *\n * @api private\n */\n onClose(reason, desc) {\n if (\n \"opening\" === this.readyState ||\n \"open\" === this.readyState ||\n \"closing\" === this.readyState\n ) {\n\n\n const self = this;\n\n // clear timers\n clearTimeout(this.pingIntervalTimer);\n clearTimeout(this.pingTimeoutTimer);\n\n // stop event from firing again for transport\n this.transport.removeAllListeners(\"close\");\n\n // ensure transport won't stay open\n this.transport.close();\n\n // ignore further transport communication\n this.transport.removeAllListeners();\n\n // set ready state\n this.readyState = \"closed\";\n\n // clear session id\n this.id = null;\n\n // emit close event\n this.emit(\"close\", reason, desc);\n\n // clean buffers after, so users can still\n // grab the buffers on `close` event\n self.writeBuffer = [];\n self.prevBufferLen = 0;\n }\n }\n\n /**\n * Filters upgrades, returning only those matching client transports.\n *\n * @param {Array} server upgrades\n * @api private\n *\n */\n filterUpgrades(upgrades) {\n const filteredUpgrades = [];\n let i = 0;\n const j = upgrades.length;\n for (; i < j; i++) {\n if (~this.transports.indexOf(upgrades[i]))\n filteredUpgrades.push(upgrades[i]);\n }\n return filteredUpgrades;\n }\n}\n\nSocket.priorWebsocketSuccess = false;\n\n/**\n * Protocol version.\n *\n * @api public\n */\n\nSocket.protocol = parser.protocol; // this is an int\n\nfunction clone(obj) {\n const o = {};\n for (let i in obj) {\n if (obj.hasOwnProperty(i)) {\n o[i] = obj[i];\n }\n }\n return o;\n}\n\nmodule.exports = Socket;\n","\n/**\n * Module exports.\n *\n * Logic borrowed from Modernizr:\n *\n * - https://github.com/Modernizr/Modernizr/blob/master/feature-detects/cors.js\n */\n\ntry {\n module.exports = typeof XMLHttpRequest !== 'undefined' &&\n 'withCredentials' in new XMLHttpRequest();\n} catch (err) {\n // if XMLHttp support is disabled in IE then it will throw\n // when trying to create\n module.exports = false;\n}\n","/* global attachEvent */\n\nconst XMLHttpRequest = require(\"xmlhttprequest-ssl\");\nconst Polling = require(\"./polling\");\nconst Emitter = require(\"component-emitter\");\nconst { pick } = require(\"../util\");\nconst globalThis = require(\"../globalThis\");\n\n\n\n\n/**\n * Empty 
function\n */\n\nfunction empty() {}\n\nconst hasXHR2 = (function() {\n const xhr = new XMLHttpRequest({ xdomain: false });\n return null != xhr.responseType;\n})();\n\nclass XHR extends Polling {\n /**\n * XHR Polling constructor.\n *\n * @param {Object} opts\n * @api public\n */\n constructor(opts) {\n super(opts);\n\n if (typeof location !== \"undefined\") {\n const isSSL = \"https:\" === location.protocol;\n let port = location.port;\n\n // some user agents have empty `location.port`\n if (!port) {\n port = isSSL ? 443 : 80;\n }\n\n this.xd =\n (typeof location !== \"undefined\" &&\n opts.hostname !== location.hostname) ||\n port !== opts.port;\n this.xs = opts.secure !== isSSL;\n }\n /**\n * XHR supports binary\n */\n const forceBase64 = opts && opts.forceBase64;\n this.supportsBinary = hasXHR2 && !forceBase64;\n }\n\n /**\n * Creates a request.\n *\n * @param {String} method\n * @api private\n */\n request(opts = {}) {\n Object.assign(opts, { xd: this.xd, xs: this.xs }, this.opts);\n return new Request(this.uri(), opts);\n }\n\n /**\n * Sends data.\n *\n * @param {String} data to send.\n * @param {Function} called upon flush.\n * @api private\n */\n doWrite(data, fn) {\n const req = this.request({\n method: \"POST\",\n data: data\n });\n const self = this;\n req.on(\"success\", fn);\n req.on(\"error\", function(err) {\n self.onError(\"xhr post error\", err);\n });\n }\n\n /**\n * Starts a poll cycle.\n *\n * @api private\n */\n doPoll() {\n\n\n const req = this.request();\n const self = this;\n req.on(\"data\", function(data) {\n self.onData(data);\n });\n req.on(\"error\", function(err) {\n self.onError(\"xhr poll error\", err);\n });\n this.pollXhr = req;\n }\n}\n\nclass Request extends Emitter {\n /**\n * Request constructor\n *\n * @param {Object} options\n * @api public\n */\n constructor(uri, opts) {\n super();\n this.opts = opts;\n\n this.method = opts.method || \"GET\";\n this.uri = uri;\n this.async = false !== opts.async;\n this.data = undefined !== opts.data ? 
opts.data : null;\n\n this.create();\n }\n\n /**\n * Creates the XHR object and sends the request.\n *\n * @api private\n */\n create() {\n const opts = pick(\n this.opts,\n \"agent\",\n \"enablesXDR\",\n \"pfx\",\n \"key\",\n \"passphrase\",\n \"cert\",\n \"ca\",\n \"ciphers\",\n \"rejectUnauthorized\"\n );\n opts.xdomain = !!this.opts.xd;\n opts.xscheme = !!this.opts.xs;\n\n const xhr = (this.xhr = new XMLHttpRequest(opts));\n const self = this;\n\n try {\n\n\n xhr.open(this.method, this.uri, this.async);\n try {\n if (this.opts.extraHeaders) {\n xhr.setDisableHeaderCheck && xhr.setDisableHeaderCheck(true);\n for (let i in this.opts.extraHeaders) {\n if (this.opts.extraHeaders.hasOwnProperty(i)) {\n xhr.setRequestHeader(i, this.opts.extraHeaders[i]);\n }\n }\n }\n } catch (e) {}\n\n if (\"POST\" === this.method) {\n try {\n xhr.setRequestHeader(\"Content-type\", \"text/plain;charset=UTF-8\");\n } catch (e) {}\n }\n\n try {\n xhr.setRequestHeader(\"Accept\", \"*/*\");\n } catch (e) {}\n\n // ie6 check\n if (\"withCredentials\" in xhr) {\n xhr.withCredentials = this.opts.withCredentials;\n }\n\n if (this.opts.requestTimeout) {\n xhr.timeout = this.opts.requestTimeout;\n }\n\n if (this.hasXDR()) {\n xhr.onload = function() {\n self.onLoad();\n };\n xhr.onerror = function() {\n self.onError(xhr.responseText);\n };\n } else {\n xhr.onreadystatechange = function() {\n if (4 !== xhr.readyState) return;\n if (200 === xhr.status || 1223 === xhr.status) {\n self.onLoad();\n } else {\n // make sure the `error` event handler that's user-set\n // does not throw in the same tick and gets caught here\n setTimeout(function() {\n self.onError(typeof xhr.status === \"number\" ? xhr.status : 0);\n }, 0);\n }\n };\n }\n\n\n\n xhr.send(this.data);\n } catch (e) {\n // Need to defer since .create() is called directly from the constructor\n // and thus the 'error' event can only be only bound *after* this exception\n // occurs. Therefore, also, we cannot throw here at all.\n setTimeout(function() {\n self.onError(e);\n }, 0);\n return;\n }\n\n if (typeof document !== \"undefined\") {\n this.index = Request.requestsCount++;\n Request.requests[this.index] = this;\n }\n }\n\n /**\n * Called upon successful response.\n *\n * @api private\n */\n onSuccess() {\n this.emit(\"success\");\n this.cleanup();\n }\n\n /**\n * Called if we have data.\n *\n * @api private\n */\n onData(data) {\n this.emit(\"data\", data);\n this.onSuccess();\n }\n\n /**\n * Called upon error.\n *\n * @api private\n */\n onError(err) {\n this.emit(\"error\", err);\n this.cleanup(true);\n }\n\n /**\n * Cleans up house.\n *\n * @api private\n */\n cleanup(fromError) {\n if (\"undefined\" === typeof this.xhr || null === this.xhr) {\n return;\n }\n // xmlhttprequest\n if (this.hasXDR()) {\n this.xhr.onload = this.xhr.onerror = empty;\n } else {\n this.xhr.onreadystatechange = empty;\n }\n\n if (fromError) {\n try {\n this.xhr.abort();\n } catch (e) {}\n }\n\n if (typeof document !== \"undefined\") {\n delete Request.requests[this.index];\n }\n\n this.xhr = null;\n }\n\n /**\n * Called upon load.\n *\n * @api private\n */\n onLoad() {\n const data = this.xhr.responseText;\n if (data !== null) {\n this.onData(data);\n }\n }\n\n /**\n * Check if it has XDomainRequest.\n *\n * @api private\n */\n hasXDR() {\n return typeof XDomainRequest !== \"undefined\" && !this.xs && this.enablesXDR;\n }\n\n /**\n * Aborts the request.\n *\n * @api public\n */\n abort() {\n this.cleanup();\n }\n}\n\n/**\n * Aborts pending requests when unloading the window. 
This is needed to prevent\n * memory leaks (e.g. when using IE) and to ensure that no spurious error is\n * emitted.\n */\n\nRequest.requestsCount = 0;\nRequest.requests = {};\n\nif (typeof document !== \"undefined\") {\n if (typeof attachEvent === \"function\") {\n attachEvent(\"onunload\", unloadHandler);\n } else if (typeof addEventListener === \"function\") {\n const terminationEvent = \"onpagehide\" in globalThis ? \"pagehide\" : \"unload\";\n addEventListener(terminationEvent, unloadHandler, false);\n }\n}\n\nfunction unloadHandler() {\n for (let i in Request.requests) {\n if (Request.requests.hasOwnProperty(i)) {\n Request.requests[i].abort();\n }\n }\n}\n\nmodule.exports = XHR;\nmodule.exports.Request = Request;\n","const { PACKET_TYPES } = require(\"./commons\");\n\nconst withNativeBlob =\n typeof Blob === \"function\" ||\n (typeof Blob !== \"undefined\" &&\n Object.prototype.toString.call(Blob) === \"[object BlobConstructor]\");\nconst withNativeArrayBuffer = typeof ArrayBuffer === \"function\";\n\n// ArrayBuffer.isView method is not defined in IE10\nconst isView = obj => {\n return typeof ArrayBuffer.isView === \"function\"\n ? ArrayBuffer.isView(obj)\n : obj && obj.buffer instanceof ArrayBuffer;\n};\n\nconst encodePacket = ({ type, data }, supportsBinary, callback) => {\n if (withNativeBlob && data instanceof Blob) {\n if (supportsBinary) {\n return callback(data);\n } else {\n return encodeBlobAsBase64(data, callback);\n }\n } else if (\n withNativeArrayBuffer &&\n (data instanceof ArrayBuffer || isView(data))\n ) {\n if (supportsBinary) {\n return callback(data instanceof ArrayBuffer ? data : data.buffer);\n } else {\n return encodeBlobAsBase64(new Blob([data]), callback);\n }\n }\n // plain string\n return callback(PACKET_TYPES[type] + (data || \"\"));\n};\n\nconst encodeBlobAsBase64 = (data, callback) => {\n const fileReader = new FileReader();\n fileReader.onload = function() {\n const content = fileReader.result.split(\",\")[1];\n callback(\"b\" + content);\n };\n return fileReader.readAsDataURL(data);\n};\n\nmodule.exports = encodePacket;\n","const { PACKET_TYPES_REVERSE, ERROR_PACKET } = require(\"./commons\");\n\nconst withNativeArrayBuffer = typeof ArrayBuffer === \"function\";\n\nlet base64decoder;\nif (withNativeArrayBuffer) {\n base64decoder = require(\"base64-arraybuffer\");\n}\n\nconst decodePacket = (encodedPacket, binaryType) => {\n if (typeof encodedPacket !== \"string\") {\n return {\n type: \"message\",\n data: mapBinary(encodedPacket, binaryType)\n };\n }\n const type = encodedPacket.charAt(0);\n if (type === \"b\") {\n return {\n type: \"message\",\n data: decodeBase64Packet(encodedPacket.substring(1), binaryType)\n };\n }\n const packetType = PACKET_TYPES_REVERSE[type];\n if (!packetType) {\n return ERROR_PACKET;\n }\n return encodedPacket.length > 1\n ? {\n type: PACKET_TYPES_REVERSE[type],\n data: encodedPacket.substring(1)\n }\n : {\n type: PACKET_TYPES_REVERSE[type]\n };\n};\n\nconst decodeBase64Packet = (data, binaryType) => {\n if (base64decoder) {\n const decoded = base64decoder.decode(data);\n return mapBinary(decoded, binaryType);\n } else {\n return { base64: true, data }; // fallback for old browsers\n }\n};\n\nconst mapBinary = (data, binaryType) => {\n switch (binaryType) {\n case \"blob\":\n return data instanceof ArrayBuffer ? 
new Blob([data]) : data;\n case \"arraybuffer\":\n default:\n return data; // assuming the data is already an ArrayBuffer\n }\n};\n\nmodule.exports = decodePacket;\n","/*\n * base64-arraybuffer\n * https://github.com/niklasvh/base64-arraybuffer\n *\n * Copyright (c) 2012 Niklas von Hertzen\n * Licensed under the MIT license.\n */\n(function(){\n \"use strict\";\n\n var chars = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/\";\n\n // Use a lookup table to find the index.\n var lookup = new Uint8Array(256);\n for (var i = 0; i < chars.length; i++) {\n lookup[chars.charCodeAt(i)] = i;\n }\n\n exports.encode = function(arraybuffer) {\n var bytes = new Uint8Array(arraybuffer),\n i, len = bytes.length, base64 = \"\";\n\n for (i = 0; i < len; i+=3) {\n base64 += chars[bytes[i] >> 2];\n base64 += chars[((bytes[i] & 3) << 4) | (bytes[i + 1] >> 4)];\n base64 += chars[((bytes[i + 1] & 15) << 2) | (bytes[i + 2] >> 6)];\n base64 += chars[bytes[i + 2] & 63];\n }\n\n if ((len % 3) === 2) {\n base64 = base64.substring(0, base64.length - 1) + \"=\";\n } else if (len % 3 === 1) {\n base64 = base64.substring(0, base64.length - 2) + \"==\";\n }\n\n return base64;\n };\n\n exports.decode = function(base64) {\n var bufferLength = base64.length * 0.75,\n len = base64.length, i, p = 0,\n encoded1, encoded2, encoded3, encoded4;\n\n if (base64[base64.length - 1] === \"=\") {\n bufferLength--;\n if (base64[base64.length - 2] === \"=\") {\n bufferLength--;\n }\n }\n\n var arraybuffer = new ArrayBuffer(bufferLength),\n bytes = new Uint8Array(arraybuffer);\n\n for (i = 0; i < len; i+=4) {\n encoded1 = lookup[base64.charCodeAt(i)];\n encoded2 = lookup[base64.charCodeAt(i+1)];\n encoded3 = lookup[base64.charCodeAt(i+2)];\n encoded4 = lookup[base64.charCodeAt(i+3)];\n\n bytes[p++] = (encoded1 << 2) | (encoded2 >> 4);\n bytes[p++] = ((encoded2 & 15) << 4) | (encoded3 >> 2);\n bytes[p++] = ((encoded3 & 3) << 6) | (encoded4 & 63);\n }\n\n return arraybuffer;\n };\n})();\n","const Polling = require(\"./polling\");\nconst globalThis = require(\"../globalThis\");\n\nconst rNewline = /\\n/g;\nconst rEscapedNewline = /\\\\n/g;\n\n/**\n * Global JSONP callbacks.\n */\n\nlet callbacks;\n\n/**\n * Noop.\n */\n\nfunction empty() {}\n\nclass JSONPPolling extends Polling {\n /**\n * JSONP Polling constructor.\n *\n * @param {Object} opts.\n * @api public\n */\n constructor(opts) {\n super(opts);\n\n this.query = this.query || {};\n\n // define global callbacks array if not present\n // we do this here (lazily) to avoid unneeded global pollution\n if (!callbacks) {\n // we need to consider multiple engines in the same page\n callbacks = globalThis.___eio = globalThis.___eio || [];\n }\n\n // callback identifier\n this.index = callbacks.length;\n\n // add callback to jsonp global\n const self = this;\n callbacks.push(function(msg) {\n self.onData(msg);\n });\n\n // append to query string\n this.query.j = this.index;\n\n // prevent spurious errors from being emitted when the window is unloaded\n if (typeof addEventListener === \"function\") {\n addEventListener(\n \"beforeunload\",\n function() {\n if (self.script) self.script.onerror = empty;\n },\n false\n );\n }\n }\n\n /**\n * JSONP only supports binary as base64 encoded strings\n */\n get supportsBinary() {\n return false;\n }\n\n /**\n * Closes the socket.\n *\n * @api private\n */\n doClose() {\n if (this.script) {\n this.script.parentNode.removeChild(this.script);\n this.script = null;\n }\n\n if (this.form) {\n this.form.parentNode.removeChild(this.form);\n 
this.form = null;\n this.iframe = null;\n }\n\n super.doClose();\n }\n\n /**\n * Starts a poll cycle.\n *\n * @api private\n */\n doPoll() {\n const self = this;\n const script = document.createElement(\"script\");\n\n if (this.script) {\n this.script.parentNode.removeChild(this.script);\n this.script = null;\n }\n\n script.async = true;\n script.src = this.uri();\n script.onerror = function(e) {\n self.onError(\"jsonp poll error\", e);\n };\n\n const insertAt = document.getElementsByTagName(\"script\")[0];\n if (insertAt) {\n insertAt.parentNode.insertBefore(script, insertAt);\n } else {\n (document.head || document.body).appendChild(script);\n }\n this.script = script;\n\n const isUAgecko =\n \"undefined\" !== typeof navigator && /gecko/i.test(navigator.userAgent);\n\n if (isUAgecko) {\n setTimeout(function() {\n const iframe = document.createElement(\"iframe\");\n document.body.appendChild(iframe);\n document.body.removeChild(iframe);\n }, 100);\n }\n }\n\n /**\n * Writes with a hidden iframe.\n *\n * @param {String} data to send\n * @param {Function} called upon flush.\n * @api private\n */\n doWrite(data, fn) {\n const self = this;\n let iframe;\n\n if (!this.form) {\n const form = document.createElement(\"form\");\n const area = document.createElement(\"textarea\");\n const id = (this.iframeId = \"eio_iframe_\" + this.index);\n\n form.className = \"socketio\";\n form.style.position = \"absolute\";\n form.style.top = \"-1000px\";\n form.style.left = \"-1000px\";\n form.target = id;\n form.method = \"POST\";\n form.setAttribute(\"accept-charset\", \"utf-8\");\n area.name = \"d\";\n form.appendChild(area);\n document.body.appendChild(form);\n\n this.form = form;\n this.area = area;\n }\n\n this.form.action = this.uri();\n\n function complete() {\n initIframe();\n fn();\n }\n\n function initIframe() {\n if (self.iframe) {\n try {\n self.form.removeChild(self.iframe);\n } catch (e) {\n self.onError(\"jsonp polling iframe removal error\", e);\n }\n }\n\n try {\n // ie6 dynamic iframes with target=\"\" support (thanks Chris Lambacher)\n const html = '
[Remainder garbled by markup stripping. Surviving fragments are the text nodes of later diff hunks that add HTML list-item templates: a service column, the material icons book, delete, file_download, search and send, and the empty-list messages "Nothing here..." and "No ressource available (yet).".]