diff --git a/.env.tpl b/.env.tpl
index 54165fce..25884399 100644
--- a/.env.tpl
+++ b/.env.tpl
@@ -39,7 +39,7 @@ HOST_DOCKER_GID=
# HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"`
# SECRET_KEY=
-# Example: nopaque.example.com nopaque.example.com:5000
+# Example: nopaque.example.com or nopaque.example.com:5000
# HINT: If your instance is publicly available on a different Port then 80/443,
# you will have to add this to the server name
SERVER_NAME=
@@ -124,9 +124,12 @@ NOPAQUE_ADMIN=
# DEFAULT: False
# NOPAQUE_DAEMON_ENABLED=
+# The hostname or IP address for the server to listen on.
+# HINT: To use a domain locally, add each hostname that should resolve to the app to your hosts file.
# DEFAULT: 0.0.0.0
# NOPAQUE_HOST=
+# The port number for the server to listen on.
# DEFAULT: 5000
# NOPAQUE_PORT=
diff --git a/daemon/.dockerignore b/daemon/.dockerignore
deleted file mode 100644
index 21803000..00000000
--- a/daemon/.dockerignore
+++ /dev/null
@@ -1,6 +0,0 @@
-# Docker related files
-Dockerfile
-.dockerignore
-
-# Packages
-__pycache__
diff --git a/daemon/Dockerfile b/daemon/Dockerfile
deleted file mode 100644
index be9a5d74..00000000
--- a/daemon/Dockerfile
+++ /dev/null
@@ -1,32 +0,0 @@
-FROM python:3.9.0-slim-buster
-
-
-LABEL authors="Patrick Jentsch
, Stephan Porada "
-
-
-ARG DOCKER_GID
-ARG GID
-ARG UID
-ENV LANG=C.UTF-8
-
-
-RUN apt-get update \
- && apt-get install --no-install-recommends --yes \
- build-essential \
- libpq-dev \
- && rm -r /var/lib/apt/lists/*
-
-
-RUN groupadd --gid ${DOCKER_GID} --system docker \
- && groupadd --gid ${GID} --system nopaqued \
- && useradd --create-home --gid ${GID} --groups ${DOCKER_GID} --no-log-init --system --uid ${UID} nopaqued
-USER nopaqued
-WORKDIR /home/nopaqued
-
-
-COPY --chown=nopaqued:nopaqued [".", "."]
-RUN python -m venv venv \
- && venv/bin/pip install --requirement requirements.txt
-
-
-ENTRYPOINT ["./boot.sh"]
diff --git a/daemon/app/__init__.py b/daemon/app/__init__.py
deleted file mode 100644
index eaccafd2..00000000
--- a/daemon/app/__init__.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from config import config
-from sqlalchemy import create_engine
-from sqlalchemy.orm import scoped_session, sessionmaker
-from time import sleep
-import docker
-import os
-
-
-configuration = config[os.environ.get('NOPAQUE_CONFIG', 'development')]
-configuration.init()
-docker_client = docker.from_env()
-engine = create_engine(configuration.SQLALCHEMY_DATABASE_URI)
-Session = scoped_session(sessionmaker(bind=engine))
-
-
-def run():
- from .tasks.check_corpora import check_corpora
- check_corpora_thread = check_corpora()
- from .tasks.check_jobs import check_jobs
- check_jobs_thread = check_jobs()
- from .tasks.notify import notify
- notify_thread = notify()
-
- while True:
- if not check_corpora_thread.is_alive():
- check_corpora_thread = check_corpora()
- if not check_jobs_thread.is_alive():
- check_jobs_thread = check_jobs()
- if not notify_thread.is_alive():
- notify_thread = notify()
- sleep(3)
diff --git a/daemon/app/decorators.py b/daemon/app/decorators.py
deleted file mode 100644
index 040250a8..00000000
--- a/daemon/app/decorators.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from functools import wraps
-from threading import Thread
-
-
-def background(f):
- '''
- ' This decorator executes a function in a Thread.
- '''
- @wraps(f)
- def wrapped(*args, **kwargs):
- thread = Thread(target=f, args=args, kwargs=kwargs)
- thread.start()
- return thread
- return wrapped
diff --git a/daemon/app/models.py b/daemon/app/models.py
deleted file mode 100644
index 1f113142..00000000
--- a/daemon/app/models.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from sqlalchemy.ext.automap import automap_base
-from sqlalchemy.orm import relationship
-from . import engine
-
-
-Base = automap_base()
-
-
-# Classes for database models
-class Corpus(Base):
- __tablename__ = 'corpora'
- files = relationship('CorpusFile', collection_class=set)
-
-
-class CorpusFile(Base):
- __tablename__ = 'corpus_files'
-
-
-class Job(Base):
- __tablename__ = 'jobs'
- inputs = relationship('JobInput', collection_class=set)
- results = relationship('JobResult', collection_class=set)
- notification_data = relationship('NotificationData', collection_class=list)
- notification_email_data = relationship('NotificationEmailData',
- collection_class=list)
-
-
-class JobInput(Base):
- __tablename__ = 'job_results'
-
-
-class JobResult(Base):
- __tablename__ = 'job_results'
-
-
-class NotificationData(Base):
- __tablename__ = 'notification_data'
- job = relationship('Job', collection_class=set)
-
-
-class NotificationEmailData(Base):
- __tablename__ = 'notification_email_data'
- job = relationship('Job', collection_class=set)
-
-
-class User(Base):
- __tablename__ = 'users'
- jobs = relationship('Job', collection_class=set)
- corpora = relationship('Corpus', collection_class=set)
-
-
-Base.prepare(engine, reflect=True)
diff --git a/daemon/app/tasks/__init__.py b/daemon/app/tasks/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/daemon/app/tasks/check_corpora.py b/daemon/app/tasks/check_corpora.py
deleted file mode 100644
index 6ecffea5..00000000
--- a/daemon/app/tasks/check_corpora.py
+++ /dev/null
@@ -1,140 +0,0 @@
-from .. import configuration as config
-from .. import docker_client, Session
-from ..decorators import background
-from ..models import Corpus
-import docker
-import logging
-import os
-import shutil
-
-
-@background
-def check_corpora():
- session = Session()
- corpora = session.query(Corpus).all()
- for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora):
- __create_build_corpus_service(corpus)
- for corpus in filter(lambda corpus: (corpus.status == 'queued'
- or corpus.status == 'running'),
- corpora):
- __checkout_build_corpus_service(corpus)
- for corpus in filter(lambda corpus: corpus.status == 'start analysis',
- corpora):
- __create_cqpserver_container(corpus)
- for corpus in filter(lambda corpus: corpus.status == 'stop analysis',
- corpora):
- __remove_cqpserver_container(corpus)
- session.commit()
- Session.remove()
-
-
-def __create_build_corpus_service(corpus):
- corpus_dir = os.path.join(config.DATA_DIR,
- str(corpus.user_id),
- 'corpora',
- str(corpus.id))
- corpus_data_dir = os.path.join(corpus_dir, 'data')
- corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt')
- corpus_registry_dir = os.path.join(corpus_dir, 'registry')
- if os.path.exists(corpus_data_dir):
- shutil.rmtree(corpus_data_dir)
- if os.path.exists(corpus_registry_dir):
- shutil.rmtree(corpus_registry_dir)
- os.mkdir(corpus_data_dir)
- os.mkdir(corpus_registry_dir)
- service_args = {'command': 'docker-entrypoint.sh build-corpus',
- 'constraints': ['node.role==worker'],
- 'labels': {'origin': 'nopaque',
- 'type': 'corpus.prepare',
- 'corpus_id': str(corpus.id)},
- 'mounts': [corpus_file + ':/root/files/corpus.vrt:ro',
- corpus_data_dir + ':/corpora/data:rw',
- corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
- 'name': 'build-corpus_{}'.format(corpus.id),
- 'restart_policy': docker.types.RestartPolicy()}
- service_image = \
- 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
- try:
- service = docker_client.services.get(service_args['name'])
- except docker.errors.NotFound:
- pass
- except docker.errors.DockerException:
- return
- else:
- service.remove()
- try:
- docker_client.services.create(service_image, **service_args)
- except docker.errors.DockerException:
- corpus.status = 'failed'
- else:
- corpus.status = 'queued'
-
-
-def __checkout_build_corpus_service(corpus):
- service_name = 'build-corpus_{}'.format(corpus.id)
- try:
- service = docker_client.services.get(service_name)
- except docker.errors.NotFound:
- logging.error('__checkout_build_corpus_service({}):'.format(corpus.id)
- + ' The service does not exist.'
- + ' (stauts: {} -> failed)'.format(corpus.status))
- corpus.status = 'failed'
- return
- except docker.errors.DockerException:
- return
- service_tasks = service.tasks()
- if not service_tasks:
- return
- task_state = service_tasks[0].get('Status').get('State')
- if corpus.status == 'queued' and task_state != 'pending':
- corpus.status = 'running'
- elif corpus.status == 'running' and task_state == 'complete':
- service.remove()
- corpus.status = 'prepared'
- elif corpus.status == 'running' and task_state == 'failed':
- service.remove()
- corpus.status = task_state
-
-
-def __create_cqpserver_container(corpus):
- corpus_dir = os.path.join(config.DATA_DIR,
- str(corpus.user_id),
- 'corpora',
- str(corpus.id))
- corpus_data_dir = os.path.join(corpus_dir, 'data')
- corpus_registry_dir = os.path.join(corpus_dir, 'registry')
- container_args = {'command': 'cqpserver',
- 'detach': True,
- 'volumes': [corpus_data_dir + ':/corpora/data:rw',
- corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
- 'name': 'cqpserver_{}'.format(corpus.id),
- 'network': 'nopaque_default'}
- container_image = \
- 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
- try:
- container = docker_client.containers.get(container_args['name'])
- except docker.errors.NotFound:
- pass
- except docker.errors.DockerException:
- return
- else:
- container.remove(force=True)
- try:
- docker_client.containers.run(container_image, **container_args)
- except docker.errors.DockerException:
- return
- else:
- corpus.status = 'analysing'
-
-
-def __remove_cqpserver_container(corpus):
- container_name = 'cqpserver_{}'.format(corpus.id)
- try:
- container = docker_client.containers.get(container_name)
- except docker.errors.NotFound:
- pass
- except docker.errors.DockerException:
- return
- else:
- container.remove(force=True)
- corpus.status = 'prepared'
diff --git a/daemon/app/tasks/check_jobs.py b/daemon/app/tasks/check_jobs.py
deleted file mode 100644
index f5530e1e..00000000
--- a/daemon/app/tasks/check_jobs.py
+++ /dev/null
@@ -1,147 +0,0 @@
-from datetime import datetime
-from .. import configuration as config
-from .. import docker_client, Session
-from ..decorators import background
-from ..models import Job, JobResult, NotificationData, NotificationEmailData
-import docker
-import logging
-import json
-import os
-
-
-@background
-def check_jobs():
- session = Session()
- jobs = session.query(Job).all()
- for job in filter(lambda job: job.status == 'submitted', jobs):
- __create_job_service(job)
- for job in filter(lambda job: job.status == 'queued', jobs):
- __checkout_job_service(job, session)
- __add_notification_data(job, 'queued', session)
- for job in filter(lambda job: job.status == 'running', jobs):
- __checkout_job_service(job, session)
- __add_notification_data(job, 'running', session)
- for job in filter(lambda job: job.status == 'complete', jobs):
- __add_notification_data(job, 'complete', session)
- for job in filter(lambda job: job.status == 'failed', jobs):
- __add_notification_data(job, 'failed', session)
- for job in filter(lambda job: job.status == 'canceling', jobs):
- __remove_job_service(job)
- session.commit()
- Session.remove()
-
-
-def __add_notification_data(job, notified_on_status, session):
- # checks if user wants any notifications at all
- if (job.user.setting_job_status_mail_notifications == 'none'):
- return
- # checks if user wants only notification on completed jobs
- elif (job.user.setting_job_status_mail_notifications == 'end'
- and notified_on_status != 'complete'):
- return
- else:
- # check if a job already has associated NotificationData
- notification_exists = len(job.notification_data)
- # create notification_data for current job if there is none
- if (notification_exists == 0):
- notification_data = NotificationData(job_id=job.id)
- session.add(notification_data)
- # If no commit job will have no NotificationData
- session.commit()
- if (job.notification_data[0].notified_on != notified_on_status):
- notification_email_data = NotificationEmailData(job_id=job.id)
- notification_email_data.notify_status = notified_on_status
- notification_email_data.creation_date = datetime.utcnow()
- job.notification_data[0].notified_on = notified_on_status
- session.add(notification_email_data)
-
-
-def __create_job_service(job):
- job_dir = os.path.join(config.DATA_DIR,
- str(job.user_id),
- 'jobs',
- str(job.id))
- cmd = '{} -i /files -o /files/output'.format(job.service)
- if job.service == 'file-setup':
- cmd += ' -f {}'.format(job.secure_filename)
- cmd += ' --log-dir /files'
- cmd += ' --zip [{}]_{}'.format(job.service, job.secure_filename)
- cmd += ' ' + ' '.join(json.loads(job.service_args))
- service_args = {'command': cmd,
- 'constraints': ['node.role==worker'],
- 'labels': {'origin': 'nopaque',
- 'type': 'service.{}'.format(job.service),
- 'job_id': str(job.id)},
- 'mounts': [job_dir + ':/files:rw'],
- 'name': 'job_{}'.format(job.id),
- 'resources': docker.types.Resources(
- cpu_reservation=job.n_cores * (10 ** 9),
- mem_reservation=job.mem_mb * (10 ** 6)),
- 'restart_policy': docker.types.RestartPolicy()}
- service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/'
- + job.service + ':' + job.service_version)
- try:
- service = docker_client.services.get(service_args['name'])
- except docker.errors.NotFound:
- pass
- except docker.errors.DockerException:
- return
- else:
- service.remove()
- try:
- docker_client.services.create(service_image, **service_args)
- except docker.errors.DockerException:
- job.status = 'failed'
- else:
- job.status = 'queued'
-
-
-def __checkout_job_service(job, session):
- service_name = 'job_{}'.format(job.id)
- try:
- service = docker_client.services.get(service_name)
- except docker.errors.NotFound:
- logging.error('__checkout_job_service({}): '.format(job.id)
- + 'The service does not exist. '
- + '(status: {} -> failed)'.format(job.status))
- job.status = 'failed'
- return
- except docker.errors.DockerException:
- return
- service_tasks = service.tasks()
- if not service_tasks:
- return
- task_state = service_tasks[0].get('Status').get('State')
- if job.status == 'queued' and task_state != 'pending':
- job.status = 'running'
- elif (job.status == 'running'
- and (task_state == 'complete' or task_state == 'failed')):
- service.remove()
- job.end_date = datetime.utcnow()
- job.status = task_state
- if task_state == 'complete':
- results_dir = os.path.join(config.DATA_DIR,
- str(job.user_id),
- 'jobs',
- str(job.id),
- 'output')
- results = filter(lambda x: x.endswith('.zip'),
- os.listdir(results_dir))
- for result in results:
- job_result = JobResult(dir=results_dir,
- filename=result,
- job_id=job.id)
- session.add(job_result)
-
-
-def __remove_job_service(job):
- service_name = 'job_{}'.format(job.id)
- try:
- service = docker_client.services.get(service_name)
- except docker.errors.NotFound:
- job.status = 'canceled'
- except docker.errors.DockerException:
- return
- else:
- service.update(mounts=None)
- service.remove()
diff --git a/daemon/app/tasks/libnotify/__init__.py b/daemon/app/tasks/libnotify/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/daemon/app/tasks/libnotify/notification.py b/daemon/app/tasks/libnotify/notification.py
deleted file mode 100644
index 488471c3..00000000
--- a/daemon/app/tasks/libnotify/notification.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from email.message import EmailMessage
-
-
-class Notification(EmailMessage):
- """docstring for Email."""
-
- def set_notification_content(self,
- subject_template,
- subject_template_values_dict,
- body_txt_template_path,
- body_html_template_path,
- body_template_values_dict):
- # Create subject with subject_template_values_dict
- self['subject'] = subject_template.format(
- **subject_template_values_dict)
- # Open template files and insert values from body_template_values_dict
- with open(body_txt_template_path) as nfile:
- self.body = nfile.read().format(**body_template_values_dict)
- with open(body_html_template_path) as nfile:
- self.html = nfile.read().format(**body_template_values_dict)
- # Set txt of email
- self.set_content(self.body)
- # Set html alternative
- self.add_alternative(self.html, subtype='html')
-
- def set_addresses(self, sender, recipient):
- self['From'] = sender
- self['to'] = recipient
diff --git a/daemon/app/tasks/libnotify/service.py b/daemon/app/tasks/libnotify/service.py
deleted file mode 100644
index 633fb386..00000000
--- a/daemon/app/tasks/libnotify/service.py
+++ /dev/null
@@ -1,16 +0,0 @@
-class NotificationService:
- """This is a nopaque notifcation service object."""
-
- def __init__(self, smtp):
- # Bool to show if the mail server stoped sending mails due to exceeding
- # its sending limit
- self.mail_limit_exceeded = False
- # Holds due to an error unsent email notifications
- self.not_sent = {}
- self.smtp = smtp
-
- def send(self, email):
- self.smtp.send_message(email)
-
- def quit(self):
- self.smtp.quit()
diff --git a/daemon/app/tasks/libnotify/templates/notification.html b/daemon/app/tasks/libnotify/templates/notification.html
deleted file mode 100644
index e2edfe75..00000000
--- a/daemon/app/tasks/libnotify/templates/notification.html
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
- Dear {username},
-
- The status of your Job/Corpus({id}) with the title "{title}" has changed!
- It is now {status}!
- Time of this status update was: {time} UTC
-
- You can access your Job/Corpus here: {url}
-
-
- Kind regards!
- Your nopaque team
-
-
diff --git a/daemon/app/tasks/libnotify/templates/notification.txt b/daemon/app/tasks/libnotify/templates/notification.txt
deleted file mode 100644
index 0e221c54..00000000
--- a/daemon/app/tasks/libnotify/templates/notification.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Dear {username},
-
-The status of your Job/Corpus({id}) with the title "{title}" has changed!
-It is now {status}!
-Time of this status update was: {time} UTC
-
-You can access your Job/Corpus here: {url}
-
-Kind regards!
-Your nopaque team
\ No newline at end of file
diff --git a/daemon/app/tasks/notify.py b/daemon/app/tasks/notify.py
deleted file mode 100644
index 5d3d23f3..00000000
--- a/daemon/app/tasks/notify.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from sqlalchemy import asc
-from .libnotify.notification import Notification
-from .libnotify.service import NotificationService
-from .. import configuration as config
-from .. import Session
-from ..decorators import background
-from ..models import NotificationEmailData
-import logging
-import os
-import smtplib
-
-
-ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
-
-
-@background
-def notify():
- session = Session()
- if config.SMTP_USE_SSL:
- smtp = smtplib.SMTP_SSL(host=config.SMTP_SERVER, port=config.SMTP_PORT)
- else:
- smtp = smtplib.SMTP(host=config.SMTP_SERVER, port=config.SMTP_PORT)
- if config.SMTP_USE_TLS:
- smtp.starttls()
- try:
- smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
- except smtplib.SMTPHeloError:
- logging.warning('The server didn’t reply properly to the HELO '
- 'greeting.')
- return
- except smtplib.SMTPAuthenticationError as e:
- logging.warning('The server didn’t accept the username/password '
- 'combination.')
- logging.warning(e)
- return
- except smtplib.SMTPNotSupportedError:
- logging.warning('The AUTH command is not supported by the server.')
- return
- except smtplib.SMTPException:
- logging.warning('No suitable authentication method was found.')
- return
- notification_service = NotificationService(smtp)
- # create notifications (content, recipient etc.)
- notifications = __create_mail_notifications(notification_service, session)
- # only login and send mails if there are any notifications
- if (len(notifications) > 0):
- # combine new and unsent notifications
- notifications.update(notification_service.not_sent)
- # send all notifications
- __send_mail_notifications(notifications, notification_service)
- # remove unsent notifications because they have been sent now
- # but only if mail limit has not been exceeded
- if (notification_service.mail_limit_exceeded is not True):
- notification_service.not_sent = {}
- smtp.quit()
- Session.remove()
-
-
-# Email notification functions
-def __create_mail_notifications(notification_service, session):
- notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all() # noqa
- notifications = {}
- for data in notification_email_data:
- notification = Notification()
- notification.set_addresses(config.SMTP_DEFAULT_SENDER,
- data.job.user.email)
- subject_template = ('[nopaque] Status update for your Job/Corpora: '
- '{title}!')
- subject_template_values_dict = {'title': data.job.title}
- url = '{}://{}/{}/{}'.format(config.PROTOCOL,
- config.DOMAIN,
- 'jobs',
- data.job.id)
- body_template_values_dict = {'username': data.job.user.username,
- 'id': data.job.id,
- 'title': data.job.title,
- 'status': data.notify_status,
- 'time': data.creation_date,
- 'url': url}
- txt_tmplt = os.path.join(ROOT_DIR,
- 'libnotify/templates/notification.txt')
- html_tmplt = os.path.join(ROOT_DIR,
- 'libnotify/templates/notification.html')
- notification.set_notification_content(subject_template,
- subject_template_values_dict,
- txt_tmplt,
- html_tmplt,
- body_template_values_dict)
- notifications[data.job.id] = notification
- # Using a dictionary for notifications avoids sending multiple mails
- # if the status of a job changes in a few seconds. The user will not
- # get swamped with mails for queued, running and complete if those
- # happen in in a few seconds. Only the last update will be sent.
- # This depends on the sleep time interval though.
- session.delete(data)
- session.commit()
- return notifications
-
-
-def __send_mail_notifications(notifications, notification_service):
- for key, notification in notifications.items():
- try:
- notification_service.send(notification)
- notification_service.mail_limit_exceeded = False
- except Exception:
- # Adds notifications to unsent if mail server exceded limit for
- # consecutive mail sending
- logging.warning('limit')
- notification_service.not_sent[key] = notification
- notification_service.mail_limit_exceeded = True
- notification_service.not_sent.update(notifications)
diff --git a/daemon/boot.sh b/daemon/boot.sh
deleted file mode 100755
index 53127dd0..00000000
--- a/daemon/boot.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-source venv/bin/activate
-python nopaqued.py
diff --git a/daemon/config.py b/daemon/config.py
deleted file mode 100644
index 8729b563..00000000
--- a/daemon/config.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import logging
-import os
-
-
-ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
-
-
-class Config:
- ''' # Email # '''
- SMTP_DEFAULT_SENDER = os.environ.get('NOPAQUE_SMTP_DEFAULT_SENDER')
- SMTP_PASSWORD = os.environ.get('NOPAQUE_SMTP_PASSWORD')
- SMTP_PORT = int(os.environ.get('NOPAQUE_SMTP_PORT'))
- SMTP_SERVER = os.environ.get('NOPAQUE_SMTP_SERVER')
- SMTP_USERNAME = os.environ.get('NOPAQUE_SMTP_USERNAME')
- SMTP_USE_SSL = os.environ.get(
- 'NOPAQUE_SMTP_USE_SSL', 'false').lower() == 'true'
- SMTP_USE_TLS = os.environ.get(
- 'NOPAQUE_SMTP_USE_TLS', 'false').lower() == 'true'
-
- ''' # General # '''
- DATA_DIR = os.environ.get('NOPAQUE_DATA_DIR', '/mnt/nopaque')
- DOMAIN = os.environ.get('NOPAQUE_DOMAIN', 'localhost')
- PROTOCOL = os.environ.get('NOPAQUE_PROTOCOL', 'http')
- SECRET_KEY = os.environ.get('NOPAQUE_SECRET_KEY', 'hard to guess string')
-
- ''' # Logging # '''
- LOG_DATE_FORMAT = os.environ.get('NOPAQUE_LOG_DATE_FORMAT',
- '%Y-%m-%d %H:%M:%S')
- LOG_FILE = os.environ.get('NOPAQUED_LOG_FILE',
- os.path.join(ROOT_DIR, 'nopaqued.log'))
- LOG_FORMAT = os.environ.get(
- 'NOPAQUE_LOG_FORMAT',
- '[%(asctime)s] %(levelname)s in '
- '%(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s'
- )
- LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL', 'WARNING')
-
- @classmethod
- def init(cls):
- # Set up logging according to the corresponding (LOG_*) variables
- logging.basicConfig(datefmt=cls.LOG_DATE_FORMAT,
- filename=cls.LOG_FILE,
- format=cls.LOG_FORMAT,
- level=cls.LOG_LEVEL)
-
-
-class DevelopmentConfig(Config):
- ''' # Database # '''
- SQLALCHEMY_DATABASE_URI = os.environ.get(
- 'NOPAQUE_DEV_DATABASE_URL',
- 'sqlite:///' + os.path.join(ROOT_DIR, 'data-dev.sqlite')
- )
-
-
-class ProductionConfig(Config):
- ''' # Database # '''
- SQLALCHEMY_DATABASE_URI = os.environ.get(
- 'NOPAQUE_DATABASE_URL',
- 'sqlite:///' + os.path.join(ROOT_DIR, 'data.sqlite')
- )
-
-
-class TestingConfig(Config):
- ''' # Database # '''
- SQLALCHEMY_DATABASE_URI = os.environ.get(
- 'NOPAQUE_TEST_DATABASE_URL', 'sqlite://')
-
-
-config = {'development': DevelopmentConfig,
- 'production': ProductionConfig,
- 'testing': TestingConfig}
diff --git a/daemon/nopaqued.py b/daemon/nopaqued.py
deleted file mode 100644
index 7fbb79dc..00000000
--- a/daemon/nopaqued.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from dotenv import load_dotenv
-from app import run
-import os
-
-
-# Load environment variables
-DOTENV_FILE = os.path.join(os.path.dirname(__file__), '.env')
-if os.path.exists(DOTENV_FILE):
- load_dotenv(DOTENV_FILE)
-
-
-if __name__ == '__main__':
- run()
diff --git a/daemon/requirements.txt b/daemon/requirements.txt
deleted file mode 100644
index de767e32..00000000
--- a/daemon/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-docker
-psycopg2
-python-dotenv
-SQLAlchemy
diff --git a/web/boot.sh b/web/boot.sh
index d6536518..836e91ab 100755
--- a/web/boot.sh
+++ b/web/boot.sh
@@ -1,7 +1,7 @@
#!/bin/bash
if [[ "${NOPAQUE_DAEMON_ENABLED}" == "True" ]]; then
- echo "Starting nopaque daemon..."
+ echo "INFO Starting nopaque daemon process..."
./nopaque-daemon.sh &
fi
@@ -13,7 +13,7 @@ if [[ "${#}" -eq 0 ]]; then
if [[ "${?}" == "0" ]]; then
break
fi
- echo Deploy command failed, retrying in 5 secs...
+ echo "Deploy command failed, retrying in 5 secs..."
sleep 5
done
python nopaque.py