Huge config update and smtp fix for daemon

This commit is contained in:
Patrick Jentsch 2020-10-08 12:34:02 +02:00
parent 5e221d90ad
commit dff92cbf4d
43 changed files with 613 additions and 1204 deletions

189
.env.tpl
View File

@ -1,64 +1,145 @@
### Build ### ################################################################################
# Bash: getent group docker | cut -d: -f3 # Docker #
DOCKER_GID= ################################################################################
# Bash: id -g # DEFAULT: ./db
GID= # NOTE: Use `.` as <project-root-dir>
# Bash: id -u # HOST_DB_DIR=
UID=
# Example: 999
# HINT: Use this bash command `getent group docker | cut -d: -f3`
HOST_DOCKER_GID=
# Example: 1000
# HINT: Use this bash command `id -g`
HOST_GID=
# DEFAULT: ./mq
# NOTE: Use `.` as <project-root-dir>
# HOST_MQ_DIR=
# DEFAULT: ./nopaqued.log
# NOTES: Use `.` as <project-root-dir>,
# This file must be present on container startup
# HOST_NOPAQUE_DAEMON_LOG_FILE=
# DEFAULT: ./nopaque.log
# NOTES: Use `.` as <project-root-dir>,
# This file must be present on container startup
# HOST_NOPAQUE_LOG_FILE=
# Example: 1000
# HINT: Use this bash command `id -u`
HOST_UID=
################################################################################
# Database (only PostgreSQL) #
################################################################################
NOPAQUE_DB_HOST=
### Runtime ### NOPAQUE_DB_NAME=
# Fill out these variables to use the Docker HTTP socket. When doing this, you
# can remove the Docker UNIX socket mount from the docker-compose file. NOPAQUE_DB_PASSWORD=
# Example: /home/nopaqued/.docker
# DOCKER_CERT_PATH= # DEFAULT: 5432
# Example: host.docker.internal # NOPAQUE_DB_PORT=
# DOCKER_HOST=
NOPAQUE_DB_USERNAME=
################################################################################
# SMTP #
################################################################################
# EXAMPLE: nopaque Admin <nopaque@example.com>
NOPAQUE_SMTP_DEFAULT_SENDER=
NOPAQUE_SMTP_PASSWORD=
# EXAMPLE: smtp.example.com
NOPAQUE_SMTP_SERVER=
# EXAMPLE: 587
NOPAQUE_SMTP_PORT=
# DEFAULT: False
# Choose one: False, True # Choose one: False, True
# DOCKER_TLS_VERIFY= # NOPAQUE_SMTP_USE_SSL=
# Choose one: development, production, testing # DEFAULT: False
FLASK_CONFIG=
# Bash: python -c "import uuid; print(uuid.uuid4().hex)"
SECRET_KEY=
# Example: -
GITLAB_USERNAME=
# Example: -
GITLAB_PASSWORD=
# Example: smtp.example.com
MAIL_SERVER=
# Example: 587
MAIL_PORT=
# Choose one: False, True # Choose one: False, True
MAIL_USE_TLS= # NOPAQUE_SMTP_USE_TLS=
# Example: nopaque@example.com
MAIL_USERNAME=
# Example: -
MAIL_PASSWORD=
# Example: nopaque@example.com # EXAMPLE: nopaque@example.com
NOPAQUE_ADMIN= NOPAQUE_SMTP_USERNAME=
# Example: nopaque@example.com
NOPAQUE_CONTACT=
# Example: nopaque.localhost ################################################################################
NOPAQUE_DOMAIN= # General #
################################################################################
# Example: admin.nopaque@example.com
NOPAQUE_ADMIN_EMAIL_ADRESS=
# Example: contact.nopaque@example.com
NOPAQUE_CONTACT_EMAIL_ADRESS=
# DEFAULT: /mnt/nopaque
# NOTE: This must be a network share and it must be available on all Docker Swarm nodes
# NOPAQUE_DATA_DIR=
# DEFAULT: False
# Choose one: False, True # Choose one: False, True
NOPAQUE_EXECUTE_NOTIFICATIONS= # NOPAQUE_DEBUG=
# Choose one: CRITICAL, ERROR, WARNING, INFO, DEBUG
NOPAQUE_LOG_LEVEL= # DEFAULT: localhost
# Example: nopaque Admin <nopaque@example.com> # NOPAQUE_DOMAIN=
NOPAQUE_MAIL_SENDER=
# DEFAULT: 0
# NOPAQUE_NUM_PROXIES=
# DEFAULT: http
# Choose one: http, https # Choose one: http, https
NOPAQUE_PROTOCOL= # NOPAQUE_PROTOCOL=
# Example: /mnt/nopaque
NOPAQUE_STORAGE=
# Example: nopaque # DEFAULT: 5
POSTGRES_DB_NAME= # NOPAQUE_RESSOURCES_PER_PAGE=
# Example: -
POSTGRES_USER= # DEFAULT: hard to guess string
# Example: - # HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"`
POSTGRES_PASSWORD= NOPAQUE_SECRET_KEY=
# DEFAULT: 10
# NOPAQUE_USERS_PER_PAGE=
################################################################################
# Logging #
################################################################################
# DEFAULT: <nopaqued-root-dir>/nopaqued.log ~ /home/nopaqued/nopaqued.log
# NOTE: Use `.` as <nopaqued-root-dir>
# NOPAQUE_DAEMON_LOG_FILE=
# DEFAULT: %Y-%m-%d %H:%M:%S
# NOPAQUE_LOG_DATE_FORMAT=
# DEFAULT: <nopaque-root-dir>/nopaque.log ~ /home/nopaque/nopaque.log
# NOTE: Use `.` as <nopaque-root-dir>
# NOPAQUE_LOG_FILE=
# DEFAULT: [%(asctime)s] %(levelname)s in %(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s
# NOPAQUE_LOG_FORMAT=
# DEFAULT: ERROR
# Choose one: CRITICAL, ERROR, WARNING, INFO, DEBUG
# NOPAQUE_LOG_LEVEL=
################################################################################
# Message queue #
################################################################################
NOPAQUE_MQ_HOST=
# EXAMPLE: 6379
NOPAQUE_MQ_PORT=
# Choose one of the supported types by Flask-SocketIO
NOPAQUE_MQ_TYPE=

37
.gitignore vendored
View File

@ -1,6 +1,37 @@
docker-compose.override.yml
nopaque.log nopaque.log
nopaqued.log nopaqued.log
.DS_Store
*.env *.py[cod]
# C extensions
*.so
# Docker related files
docker-compose.override.yml
db
mq
# Environment files
.env
# Installer logs
pip-log.txt
# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64
__pycache__ __pycache__
# Virtual environment
venv

View File

@ -1,3 +1,6 @@
# Docker related files
Dockerfile Dockerfile
.dockerignore .dockerignore
*.bak
# Packages
__pycache__

View File

@ -1,7 +1,7 @@
FROM python:3.6-slim-stretch FROM python:3.6-slim-stretch
LABEL maintainer="inf_sfb1288@lists.uni-bielefeld.de" LABEL authors="Patrick Jentsch <p.jentsch@uni-bielefeld.de>, Stephan Porada <sporada@uni-bielefeld.de>"
ARG DOCKER_GID ARG DOCKER_GID
@ -15,7 +15,7 @@ RUN apt-get update \
build-essential \ build-essential \
libpq-dev \ libpq-dev \
wait-for-it \ wait-for-it \
&& rm -rf /var/lib/apt/lists/* && rm -r /var/lib/apt/lists/*
RUN groupadd --gid ${DOCKER_GID} --system docker \ RUN groupadd --gid ${DOCKER_GID} --system docker \
@ -31,4 +31,4 @@ RUN python -m venv venv \
&& mkdir logs && mkdir logs
ENTRYPOINT ["./docker-entrypoint.sh"] ENTRYPOINT ["./boot.sh"]

9
daemon/boot.sh Executable file
View File

@ -0,0 +1,9 @@
#!/bin/bash
# Daemon entrypoint: wait for all dependencies, then start nopaqued.

# Block until the database accepts connections (--timeout=0 waits forever).
echo "Waiting for db..."
wait-for-it "${NOPAQUE_DB_HOST}:${NOPAQUE_DB_PORT:-5432}" --strict --timeout=0
# Block until the nopaque web service is reachable.
echo "Waiting for nopaque..."
wait-for-it nopaque:5000 --strict --timeout=0
# Run the daemon inside the prepared virtual environment.
source venv/bin/activate
python nopaqued.py

61
daemon/config.py Normal file
View File

@ -0,0 +1,61 @@
import logging
import os
# Directory containing this module; used to anchor the default log file.
root_dir = os.path.abspath(os.path.dirname(__file__))

# Fallback values used when the corresponding NOPAQUE_* environment
# variables are unset (see .env.tpl).
# NOTE(review): .env.tpl documents the data dir default as /mnt/nopaque,
# but the code uses /mnt/data — confirm which one is intended.
DEFAULT_DATA_DIR = '/mnt/data'  # was os.path.join('/mnt/data'): a no-op join
DEFAULT_DB_PORT = '5432'
DEFAULT_DOMAIN = 'localhost'
DEFAULT_LOG_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
DEFAULT_LOG_FILE = os.path.join(root_dir, 'nopaqued.log')
DEFAULT_LOG_FORMAT = ('[%(asctime)s] %(levelname)s in %(pathname)s '
                      '(function: %(funcName)s, line: %(lineno)d): '
                      '%(message)s')
DEFAULT_LOG_LEVEL = 'ERROR'
DEFAULT_MAIL_USE_SSL = 'False'
DEFAULT_MAIL_USE_TLS = 'False'
DEFAULT_PROTOCOL = 'http'
class Config:
    """Daemon configuration, resolved once at import time from NOPAQUE_*
    environment variables with the module-level DEFAULT_* fallbacks.
    Unset variables without a default resolve to None."""

    # --- Database (PostgreSQL only) ---
    DB_HOST = os.environ.get('NOPAQUE_DB_HOST')
    DB_NAME = os.environ.get('NOPAQUE_DB_NAME')
    DB_PASSWORD = os.environ.get('NOPAQUE_DB_PASSWORD')
    DB_PORT = os.environ.get('NOPAQUE_DB_PORT', DEFAULT_DB_PORT)
    DB_USERNAME = os.environ.get('NOPAQUE_DB_USERNAME')
    SQLALCHEMY_DATABASE_URI = (
        f'postgresql://{DB_USERNAME}:{DB_PASSWORD}'
        f'@{DB_HOST}:{DB_PORT}/{DB_NAME}'
    )

    # --- SMTP ---
    SMTP_DEFAULT_SENDER = os.environ.get('NOPAQUE_SMTP_DEFAULT_SENDER')
    SMTP_PASSWORD = os.environ.get('NOPAQUE_SMTP_PASSWORD')
    SMTP_PORT = os.environ.get('NOPAQUE_SMTP_PORT')
    SMTP_SERVER = os.environ.get('NOPAQUE_SMTP_SERVER')
    SMTP_USERNAME = os.environ.get('NOPAQUE_SMTP_USERNAME')
    # The flags accept any capitalization of 'true'; anything else is False.
    SMTP_USE_SSL = (os.environ.get('NOPAQUE_SMTP_USE_SSL',
                                   DEFAULT_MAIL_USE_SSL).lower() == 'true')
    SMTP_USE_TLS = (os.environ.get('NOPAQUE_SMTP_USE_TLS',
                                   DEFAULT_MAIL_USE_TLS).lower() == 'true')

    # --- General ---
    DATA_DIR = os.environ.get('NOPAQUE_DATA_DIR', DEFAULT_DATA_DIR)
    DOMAIN = os.environ.get('NOPAQUE_DOMAIN', DEFAULT_DOMAIN)
    PROTOCOL = os.environ.get('NOPAQUE_PROTOCOL', DEFAULT_PROTOCOL)

    # --- Logging ---
    LOG_DATE_FORMAT = os.environ.get('NOPAQUE_LOG_DATE_FORMAT',
                                     DEFAULT_LOG_DATE_FORMAT)
    LOG_FILE = os.environ.get('NOPAQUE_DAEMON_LOG_FILE', DEFAULT_LOG_FILE)
    LOG_FORMAT = os.environ.get('NOPAQUE_LOG_FORMAT', DEFAULT_LOG_FORMAT)
    LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL', DEFAULT_LOG_LEVEL)

    def init_app(self):
        """Configure the root logger from the LOG_* settings above."""
        logging.basicConfig(
            filename=self.LOG_FILE,
            format=self.LOG_FORMAT,
            datefmt=self.LOG_DATE_FORMAT,
            level=self.LOG_LEVEL,
        )

View File

@ -1,9 +0,0 @@
#!/bin/bash
# Old daemon entrypoint: wait for db and web, then start nopaqued.

# Block until the database accepts connections (--timeout=0 waits forever).
echo "Waiting for db..."
wait-for-it db:5432 --strict --timeout=0
# Block until the web service is reachable.
echo "Waiting for web..."
wait-for-it web:5000 --strict --timeout=0
# Run the daemon inside the prepared virtual environment.
source venv/bin/activate
python nopaqued.py

View File

@ -1,30 +0,0 @@
import os
import logging
def init_logger():
    """Initialize and return this module's logger.

    Creates the ``logs/`` directory if needed and configures the root
    logger to write to ``logs/nopaqued.log`` (truncated on startup).
    The level comes from NOPAQUE_LOG_LEVEL; when that is unset a default
    is derived from FLASK_CONFIG (DEBUG in development, ERROR in
    production, logging's default otherwise).

    Returns:
        logging.Logger: the logger for this module.
    """
    os.makedirs('logs', exist_ok=True)
    log_level = os.environ.get('NOPAQUE_LOG_LEVEL')
    if log_level is None:
        flask_config = os.environ.get('FLASK_CONFIG')
        if flask_config == 'development':
            log_level = 'DEBUG'
        elif flask_config == 'production':
            log_level = 'ERROR'
        # 'testing' (and anything else) keeps logging's default level.
    # BUG FIX: the original called logging.basicConfig() a second time to
    # set the level, but basicConfig is a no-op once the root logger has
    # handlers — the level was never applied. Configure everything in a
    # single call instead.
    basic_config_kwargs = {
        'filename': 'logs/nopaqued.log',
        'format': ('[%(asctime)s] %(levelname)s in '
                   '%(pathname)s:%(lineno)d - %(message)s'),
        'datefmt': '%Y-%m-%d %H:%M:%S',
        'filemode': 'w',
    }
    if log_level is not None:
        basic_config_kwargs['level'] = log_level
    logging.basicConfig(**basic_config_kwargs)
    return logging.getLogger(__name__)
# Allow running this module directly to exercise the logging setup.
if __name__ == '__main__':
    init_logger()

View File

@ -2,26 +2,20 @@ from tasks.check_corpora import check_corpora
from tasks.check_jobs import check_jobs from tasks.check_jobs import check_jobs
from tasks.notify import notify from tasks.notify import notify
from time import sleep from time import sleep
import os
def nopaqued(): def nopaqued():
NOPAQUE_EXECUTE_NOTIFICATIONS = os.environ.get('NOPAQUE_EXECUTE_NOTIFICATIONS', 'True').lower() == 'true' # noqa check_corpora_thread = check_corpora()
threads = {'check_corpora': None, 'check_jobs': None, 'notify': None} check_jobs_thread = check_jobs()
notify_thread = notify()
threads['check_corpora'] = check_corpora()
threads['check_jobs'] = check_jobs()
threads['notify'] = notify(NOPAQUE_EXECUTE_NOTIFICATIONS)
while True: while True:
if not threads['check_corpora'].is_alive(): if not check_corpora_thread.is_alive():
threads['check_corpora'] = check_corpora() check_corpora_thread = check_corpora()
if not threads['check_jobs'].is_alive(): if not check_jobs_thread.is_alive():
threads['check_jobs'] = check_jobs() check_jobs_thread = check_jobs()
if not threads['notify'].is_alive(): if not notify_thread.is_alive():
threads['notify'] = notify(NOPAQUE_EXECUTE_NOTIFICATIONS) notify_thread = notify()
# If execute_notifications True mails are sent.
# If execute_notifications False no mails are sent.
# But notification status will be set nonetheless.
sleep(3) sleep(3)

View File

@ -1,455 +0,0 @@
from notify.notification import Notification
from notify.service import NotificationService
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import Session, relationship
from sqlalchemy.ext.automap import automap_base
from datetime import datetime
from time import sleep
import docker
import json
import logging
import os
import shutil
''' Global constants '''
# Root directory for all user data (corpora, jobs); set via environment.
NOPAQUE_STORAGE = os.environ.get('NOPAQUE_STORAGE')

''' Global variables '''
# Both are initialized in nopaqued() before any task function runs.
docker_client = None
session = None
# Classes for database models
#
# The tables already exist (created by the web application); automap
# reflects them at runtime via Base.prepare() in nopaqued().
Base = automap_base()


class Corpus(Base):
    """A user corpus; `files` are its source files."""
    __tablename__ = 'corpora'
    files = relationship('CorpusFile', collection_class=set)


class CorpusFile(Base):
    __tablename__ = 'corpus_files'


class Job(Base):
    """A processing job with its inputs, results and notification state."""
    __tablename__ = 'jobs'
    inputs = relationship('JobInput', collection_class=set)
    results = relationship('JobResult', collection_class=set)
    notification_data = relationship('NotificationData', collection_class=list)
    notification_email_data = relationship('NotificationEmailData',
                                           collection_class=list)


class NotificationData(Base):
    __tablename__ = 'notification_data'
    job = relationship('Job', collection_class=set)


class NotificationEmailData(Base):
    __tablename__ = 'notification_email_data'
    job = relationship('Job', collection_class=set)


class JobInput(Base):
    # BUG FIX: was 'job_results', duplicating JobResult's table and making
    # Job.inputs point at the results table instead of the inputs table.
    __tablename__ = 'job_inputs'


class JobResult(Base):
    __tablename__ = 'job_results'


class User(Base):
    __tablename__ = 'users'
    jobs = relationship('Job', collection_class=set)
    corpora = relationship('Corpus', collection_class=set)
def check_corpora():
    """Dispatch every corpus to the handler matching its current status."""
    corpora = session.query(Corpus).all()
    # The generators below are lazy, so a status changed by an earlier
    # handler (e.g. 'submitted' -> 'queued') is picked up by a later loop
    # within the same pass.
    for corpus in (c for c in corpora if c.status == 'submitted'):
        __create_build_corpus_service(corpus)
    for corpus in (c for c in corpora if c.status in ('queued', 'running')):
        __checkout_build_corpus_service(corpus)
    for corpus in (c for c in corpora if c.status == 'start analysis'):
        __create_cqpserver_container(corpus)
    for corpus in (c for c in corpora if c.status == 'stop analysis'):
        __remove_cqpserver_container(corpus)
def __create_build_corpus_service(corpus):
    """(Re)start the Docker Swarm service that builds a CWB corpus.

    Wipes previous build artifacts, removes a leftover service of the
    same name, then creates a fresh 'build-corpus_<id>' service. On
    success the corpus status becomes 'queued', on failure 'failed'.
    """
    # Per-corpus directory layout: <storage>/<user_id>/corpora/<corpus_id>
    corpus_dir = os.path.join(NOPAQUE_STORAGE, str(corpus.user_id),
                              'corpora', str(corpus.id))
    corpus_data_dir = os.path.join(corpus_dir, 'data')
    corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt')
    corpus_registry_dir = os.path.join(corpus_dir, 'registry')
    # Start from a clean slate: drop artifacts of any earlier build.
    if os.path.exists(corpus_data_dir):
        shutil.rmtree(corpus_data_dir)
    if os.path.exists(corpus_registry_dir):
        shutil.rmtree(corpus_registry_dir)
    os.mkdir(corpus_data_dir)
    os.mkdir(corpus_registry_dir)
    service_args = {'command': 'docker-entrypoint.sh build-corpus',
                    'constraints': ['node.role==worker'],
                    'labels': {'origin': 'nopaque',
                               'type': 'corpus.prepare',
                               'corpus_id': str(corpus.id)},
                    'mounts': [corpus_file + ':/root/files/corpus.vrt:ro',
                               corpus_data_dir + ':/corpora/data:rw',
                               corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
                    'name': 'build-corpus_{}'.format(corpus.id),
                    'restart_policy': docker.types.RestartPolicy()}
    service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest')
    # Remove a stale service with the same name. NotFound is the normal
    # case; any other Docker error aborts and retries on the next tick.
    try:
        service = docker_client.services.get(service_args['name'])
    except docker.errors.NotFound:
        pass
    except docker.errors.DockerException:
        return
    else:
        service.remove()
    try:
        docker_client.services.create(service_image, **service_args)
    except docker.errors.DockerException:
        corpus.status = 'failed'
    else:
        corpus.status = 'queued'
def __checkout_build_corpus_service(corpus):
    """Sync a corpus' status with the state of its build-corpus service.

    queued -> running once the Swarm task leaves 'pending'; on
    'complete' the service is removed and the corpus becomes 'prepared';
    on 'failed' the service is removed and the corpus becomes 'failed'.
    """
    service_name = 'build-corpus_{}'.format(corpus.id)
    try:
        service = docker_client.services.get(service_name)
    except docker.errors.NotFound:
        # The service vanished although the corpus is queued/running.
        # FIX: corrected 'stauts' typo in the log message.
        logger.error('__checkout_build_corpus_service({}):'.format(corpus.id)
                     + ' The service does not exist.'
                     + ' (status: {} -> failed)'.format(corpus.status))
        corpus.status = 'failed'
        return
    except docker.errors.DockerException:
        # Transient Docker error; retry on the next tick.
        return
    service_tasks = service.tasks()
    if not service_tasks:
        # No task scheduled yet; nothing to evaluate.
        return
    task_state = service_tasks[0].get('Status').get('State')
    if corpus.status == 'queued' and task_state != 'pending':
        corpus.status = 'running'
    elif corpus.status == 'running' and task_state == 'complete':
        service.remove()
        corpus.status = 'prepared'
    elif corpus.status == 'running' and task_state == 'failed':
        service.remove()
        corpus.status = task_state
def __create_cqpserver_container(corpus):
    """Start a CQPserver container so the corpus can be analysed.

    Replaces any leftover 'cqpserver_<id>' container; on success the
    corpus status becomes 'analysing'.
    """
    corpus_dir = os.path.join(NOPAQUE_STORAGE, str(corpus.user_id),
                              'corpora', str(corpus.id))
    corpus_data_dir = os.path.join(corpus_dir, 'data')
    corpus_registry_dir = os.path.join(corpus_dir, 'registry')
    container_args = {'command': 'cqpserver',
                      'detach': True,
                      'volumes': [corpus_data_dir + ':/corpora/data:rw',
                                  corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
                      'name': 'cqpserver_{}'.format(corpus.id),
                      # NOTE(review): 'opaque_default' looks like a typo for
                      # 'nopaque_default' — confirm the compose network name.
                      'network': 'opaque_default'}
    container_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest')
    # Remove a stale container of the same name (NotFound is fine).
    try:
        container = docker_client.containers.get(container_args['name'])
    except docker.errors.NotFound:
        pass
    except docker.errors.DockerException:
        return
    else:
        container.remove(force=True)
    try:
        docker_client.containers.run(container_image, **container_args)
    except docker.errors.DockerException:
        return
    else:
        corpus.status = 'analysing'
def __remove_cqpserver_container(corpus):
    """End the corpus' analysis session by force-removing its container."""
    container_name = 'cqpserver_{}'.format(corpus.id)
    try:
        container = docker_client.containers.get(container_name)
    except docker.errors.NotFound:
        # Container already gone; fall through and mark as prepared.
        pass
    except docker.errors.DockerException:
        # Transient Docker error; leave the status untouched and retry.
        return
    else:
        container.remove(force=True)
    # NOTE(review): indentation was lost in this listing — presumably this
    # runs for both the NotFound and the success path (otherwise a corpus
    # whose container vanished would be stuck in 'stop analysis'); confirm.
    corpus.status = 'prepared'
def check_jobs():
    """Dispatch every job to the handler matching its current status."""
    jobs = session.query(Job).all()
    # The generators below are lazy, so a status changed by an earlier
    # handler (e.g. 'submitted' -> 'queued') is picked up by a later loop
    # within the same pass.
    for job in (j for j in jobs if j.status == 'submitted'):
        __create_job_service(job)
    for job in (j for j in jobs if j.status == 'queued'):
        __checkout_job_service(job)
        # __add_notification_data(job, 'queued')
    for job in (j for j in jobs if j.status == 'running'):
        __checkout_job_service(job)
        # __add_notification_data(job, 'running')
    # Notification hooks for finished jobs are currently disabled:
    # for job in (j for j in jobs if j.status == 'complete'):
    #     __add_notification_data(job, 'complete')
    # for job in (j for j in jobs if j.status == 'failed'):
    #     __add_notification_data(job, 'failed')
    for job in (j for j in jobs if j.status == 'canceling'):
        __remove_job_service(job)
def __add_notification_data(job, notified_on_status):
    """Record that `job` reached `notified_on_status` so a mail can be sent.

    Respects the user's notification preference ('none' suppresses all
    mails, 'end' restricts them to completed jobs). Creates the job's
    NotificationData row on first use and adds a NotificationEmailData
    row whenever the status differs from the last notified one.
    """
    # checks if user wants any notifications at all
    if (job.user.setting_job_status_mail_notifications == 'none'):
        # logger.warning('User does not want any notifications!')
        return
    # checks if user wants only notification on completed jobs
    elif (job.user.setting_job_status_mail_notifications == 'end'
          and notified_on_status != 'complete'):
        # logger.warning('User only wants notifications on job completed!')
        return
    else:
        # check if a job already has associated NotificationData
        notification_exists = len(job.notification_data)
        # create notification_data for current job if there is none
        if (notification_exists == 0):
            notification_data = NotificationData(job_id=job.id)
            session.add(notification_data)
            # Commit immediately; otherwise job.notification_data[0] below
            # would not see the freshly created row.
            session.commit()  # If no commit job will have no NotificationData
            # logger.warning('Created NotificationData for current Job.'))
        else:
            pass
            # logger.warning('Job already had notification: {}'.format(notification_exists))
        # Only queue an email if this status has not been notified yet.
        if (job.notification_data[0].notified_on != notified_on_status):
            notification_email_data = NotificationEmailData(job_id=job.id)
            notification_email_data.notify_status = notified_on_status
            notification_email_data.creation_date = datetime.utcnow()
            job.notification_data[0].notified_on = notified_on_status
            session.add(notification_email_data)
            # logger.warning('Created NotificationEmailData for current Job.')
        else:
            # logger.warning('NotificationEmailData has already been created for current Job!')
            pass
def __create_job_service(job):
    """(Re)start the Docker Swarm service that executes a user job.

    Builds the service command from the job's parameters, replaces any
    leftover 'job_<id>' service and reserves the requested CPU/RAM. On
    success the job status becomes 'queued', on failure 'failed'.
    """
    job_dir = os.path.join(NOPAQUE_STORAGE, str(job.user_id), 'jobs',
                           str(job.id))
    # The 'file-setup' service additionally receives the uploaded filename.
    service_args = {'command': ('{} /files /files/output'.format(job.service)
                                + ' {}'.format(job.secure_filename if job.service == 'file-setup' else '')
                                + ' --log-dir /files'
                                + ' --zip [{}]_{}'.format(job.service, job.secure_filename)
                                + ' ' + ' '.join(json.loads(job.service_args))),
                    'constraints': ['node.role==worker'],
                    'labels': {'origin': 'nopaque',
                               'type': 'service.{}'.format(job.service),
                               'job_id': str(job.id)},
                    'mounts': [job_dir + ':/files:rw'],
                    'name': 'job_{}'.format(job.id),
                    # Reserve n_cores (nanoCPUs) and mem_mb (bytes).
                    'resources': docker.types.Resources(
                        cpu_reservation=job.n_cores * (10 ** 9),
                        mem_reservation=job.mem_mb * (10 ** 6)),
                    'restart_policy': docker.types.RestartPolicy()}
    service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/'
                     + job.service + ':' + job.service_version)
    # Remove a stale service with the same name (NotFound is fine).
    try:
        service = docker_client.services.get(service_args['name'])
    except docker.errors.NotFound:
        pass
    except docker.errors.DockerException:
        return
    else:
        service.remove()
    try:
        docker_client.services.create(service_image, **service_args)
    except docker.errors.DockerException:
        job.status = 'failed'
    else:
        job.status = 'queued'
def __checkout_job_service(job):
    """Sync a job's status with the state of its Swarm service.

    queued -> running once the task leaves 'pending'; on 'complete' or
    'failed' the service is removed, the end date is set and, for
    completed jobs, every produced *.zip is registered as a JobResult.
    """
    service_name = 'job_{}'.format(job.id)
    try:
        service = docker_client.services.get(service_name)
    except docker.errors.NotFound:
        # The service vanished although the job is queued/running.
        # FIX: corrected 'stauts' typo in the log message.
        logger.error('__checkout_job_service({}):'.format(job.id)
                     + ' The service does not exist.'
                     + ' (status: {} -> failed)'.format(job.status))
        job.status = 'failed'
        return
    except docker.errors.DockerException:
        # Transient Docker error; retry on the next tick.
        return
    service_tasks = service.tasks()
    if not service_tasks:
        # No task scheduled yet; nothing to evaluate.
        return
    task_state = service_tasks[0].get('Status').get('State')
    if job.status == 'queued' and task_state != 'pending':
        job.status = 'running'
    elif (job.status == 'running'
          and (task_state == 'complete' or task_state == 'failed')):
        service.remove()
        job.end_date = datetime.utcnow()
        job.status = task_state
        if task_state == 'complete':
            results_dir = os.path.join(NOPAQUE_STORAGE, str(job.user_id),
                                       'jobs', str(job.id), 'output')
            results = filter(lambda x: x.endswith('.zip'),
                             os.listdir(results_dir))
            for result in results:
                job_result = JobResult(dir=results_dir, filename=result,
                                       job_id=job.id)
                session.add(job_result)
def __remove_job_service(job):
    """Cancel a job by removing its Swarm service.

    If the service is already gone the job is marked 'canceled'; other
    Docker errors are retried on the next tick.
    """
    service_name = 'job_{}'.format(job.id)
    try:
        service = docker_client.services.get(service_name)
    except docker.errors.NotFound:
        job.status = 'canceled'
    except docker.errors.DockerException:
        return
    else:
        # Detach the mounts before removal.
        # NOTE(review): presumably to avoid busy-mount races — confirm.
        service.update(mounts=None)
        service.remove()
def handle_jobs():
    # Thin wrapper so the main loop reads as "handle_*" steps.
    check_jobs()
def handle_corpora():
    # Thin wrapper so the main loop reads as "handle_*" steps.
    check_corpora()
# Email notification functions
def create_mail_notifications(notification_service):
    """Build one Notification email per job with pending status updates.

    Consumes all NotificationEmailData rows (oldest first) and deletes
    them from the session. Returns a dict keyed by job id, so only the
    LAST status change per job survives when several occur in one cycle.
    """
    notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all()
    notifications = {}
    for data in notification_email_data:
        notification = Notification()
        notification.set_addresses(notification_service.email_address,
                                   data.job.user.email)
        subject_template = '[nopaque] Status update for your Job/Corpora: {title}!'
        subject_template_values_dict = {'title': data.job.title}
        protocol = os.environ.get('NOPAQUE_PROTOCOL')
        domain = os.environ.get('NOPAQUE_DOMAIN')
        # Deep link to the job's detail page in the web frontend.
        url = '{protocol}://{domain}/{jobs}/{id}'.format(
            protocol=protocol, domain=domain, jobs='jobs', id=data.job.id)
        body_template_values_dict = {'username': data.job.user.username,
                                     'id': data.job.id,
                                     'title': data.job.title,
                                     'status': data.notify_status,
                                     'time': data.creation_date,
                                     'url': url}
        notification.set_notification_content(subject_template,
                                              subject_template_values_dict,
                                              'templates/notification_messages/notification.txt',
                                              'templates/notification_messages/notification.html',
                                              body_template_values_dict)
        notifications[data.job.id] = notification
        # Using a dictionary for notifications avoids sending multiple mails
        # if the status of a job changes in a few seconds. The user will not
        # get swamped with mails for queued, running and complete if those
        # happen in a few seconds. Only the last update will be sent.
        session.delete(data)
    return notifications
def send_mail_notifications(notifications, notification_service):
    """Send each notification, tracking failures on the service.

    Args:
        notifications: dict mapping a job id to its notification message.
        notification_service: object exposing send(), a `not_sent` dict
            and a `mail_limit_exceeded` flag.

    A successful send resets `mail_limit_exceeded`; a failure stores the
    message in `not_sent` for a retry on the next notify() cycle and sets
    the flag. Sending is best-effort and continues with the next message.
    """
    for key, notification in notifications.items():
        try:
            notification_service.send(notification)
            notification_service.mail_limit_exceeded = False
        except Exception:
            # Typically the mail server exceeded its limit for consecutive
            # sends; keep the message queued instead of crashing the daemon.
            # (FIX: dropped the unused 'as e' binding.)
            notification_service.not_sent[key] = notification
            notification_service.mail_limit_exceeded = True
def notify():
    """One email-notification cycle: build, send and track notifications."""
    # Initialize notification service
    notification_service = NotificationService()
    notification_service.get_smtp_configs()
    notification_service.set_server()
    # create notifications (content, recipient etc.)
    notifications = create_mail_notifications(notification_service)
    # only login and send mails if there are any notifications
    if (len(notifications) > 0):
        try:
            notification_service.login()
            # combine new and unsent notifications
            notifications.update(notification_service.not_sent)
            # send all notifications
            send_mail_notifications(notifications, notification_service)
            # remove unsent notifications because they have been sent now
            # but only if mail limit has not been exceeded
            if (notification_service.mail_limit_exceeded is not True):
                notification_service.not_sent = {}
            notification_service.quit()
        except Exception as e:
            # Deliberately broad: any SMTP failure keeps the mails queued in
            # not_sent for the next cycle instead of killing the daemon.
            notification_service.not_sent.update(notifications)
# Logger functions #
def init_logger():
    """Configure the module-global `logger` used by the task functions.

    Logs go to logs/nopaqued.log (truncated on startup). The level is
    derived from FLASK_CONFIG: DEBUG in development, WARNING in
    production, logging's default otherwise.
    """
    global logger
    # BUG FIX: basicConfig(filename='logs/...') fails if the directory is
    # missing; create it instead of pre-creating the file (filemode='w'
    # creates/truncates the file anyway, so the old isfile()/open()/close()
    # dance was redundant).
    os.makedirs('logs', exist_ok=True)
    logging.basicConfig(datefmt='%Y-%m-%d %H:%M:%S',
                        filemode='w', filename='logs/nopaqued.log',
                        format='%(asctime)s - %(levelname)s - %(name)s - '
                               '%(filename)s - %(lineno)d - %(message)s')
    logger = logging.getLogger(__name__)
    flask_config = os.environ.get('FLASK_CONFIG')
    if flask_config == 'development':
        logger.setLevel(logging.DEBUG)
    elif flask_config == 'production':
        logger.setLevel(logging.WARNING)
def nopaqued():
    """Daemon main loop.

    Connects to the database, reflects the tables into the automap
    models, logs in to the private Docker registry, then polls jobs and
    corpora every 3 seconds, committing status changes each pass.
    """
    global Base
    global docker_client
    global session

    engine = create_engine(
        'postgresql://{}:{}@db/{}'.format(
            os.environ.get('POSTGRES_USER'),
            os.environ.get('POSTGRES_PASSWORD'),
            os.environ.get('POSTGRES_DB_NAME')))
    # Reflect the existing tables into the automap classes defined above.
    Base.prepare(engine, reflect=True)
    session = Session(engine)
    session.commit()

    docker_client = docker.from_env()
    # Log in so job/corpus images can be pulled from the private registry.
    docker_client.login(password=os.environ.get('GITLAB_PASSWORD'),
                        registry="gitlab.ub.uni-bielefeld.de:4567",
                        username=os.environ.get('GITLAB_USERNAME'))
    # executing background functions
    while True:
        handle_jobs()
        handle_corpora()
        # notify()
        # Persist every status change made during this pass.
        session.commit()
        sleep(3)
# Entry point: configure logging, then run the daemon loop forever.
if __name__ == '__main__':
    init_logger()
    nopaqued()

View File

@ -11,16 +11,17 @@ class Notification(EmailMessage):
body_html_template_path, body_html_template_path,
body_template_values_dict): body_template_values_dict):
# Create subject with subject_template_values_dict # Create subject with subject_template_values_dict
self['subject'] = subject_template.format(**subject_template_values_dict) self['subject'] = subject_template.format(
**subject_template_values_dict)
# Open template files and insert values from body_template_values_dict # Open template files and insert values from body_template_values_dict
with open(body_txt_template_path) as nfile: with open(body_txt_template_path) as nfile:
self.body_txt = nfile.read().format(**body_template_values_dict) self.body = nfile.read().format(**body_template_values_dict)
with open(body_html_template_path) as nfile: with open(body_html_template_path) as nfile:
self.body_html = nfile.read().format(**body_template_values_dict) self.html = nfile.read().format(**body_template_values_dict)
# Set txt of email # Set txt of email
self.set_content(self.body_txt) self.set_content(self.body)
# Set html alternative # Set html alternative
self.add_alternative(self.body_html, subtype='html') self.add_alternative(self.html, subtype='html')
def set_addresses(self, sender, recipient): def set_addresses(self, sender, recipient):
self['From'] = sender self['From'] = sender

View File

@ -1,41 +1,16 @@
import os class NotificationService:
import smtplib
class NotificationService(object):
"""This is a nopaque notifcation service object.""" """This is a nopaque notifcation service object."""
def __init__(self, execute_flag): def __init__(self, smtp):
super(NotificationService, self).__init__() # Bool to show if the mail server stoped sending mails due to exceeding
self.execute_flag = execute_flag # If True mails are sent normaly # its sending limit
# If False mails are not sent. Used to avoid sending mails for jobs self.mail_limit_exceeded = False
# that have been completed a long time ago. Use this if you implement # Holds due to an error unsent email notifications
# notify into an already existing nopaque instance. Change it to True self.not_sent = {}
# after the daemon has run one time with the flag set to False self.smtp = smtp
self.not_sent = {} # Holds due to an error unsent email notifications
self.mail_limit_exceeded = False # Bool to show if the mail server
# stoped sending mails due to exceeding its sending limit
def get_smtp_configs(self):
self.password = os.environ.get('MAIL_PASSWORD')
self.port = os.environ.get('MAIL_PORT')
self.server_str = os.environ.get('MAIL_SERVER')
self.tls = os.environ.get('MAIL_USE_TLS')
self.username = os.environ.get('MAIL_USERNAME').split("@")[0]
self.email_address = os.environ.get('MAIL_USERNAME')
def set_server(self):
self.smtp_server = smtplib.SMTP(host=self.server_str, port=self.port)
def login(self):
self.smtp_server.starttls()
self.smtp_server.login(self.username, self.password)
def send(self, email): def send(self, email):
if self.execute_flag: self.smtp.send_message(email)
self.smtp_server.send_message(email)
else:
return
def quit(self): def quit(self):
self.smtp_server.quit() self.smtp.quit()

View File

@ -1,6 +1,6 @@
from sqlalchemy.ext.automap import automap_base from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship
from tasks import engine from . import engine
Base = automap_base() Base = automap_base()

View File

@ -1,22 +1,11 @@
from config import Config
from sqlalchemy import create_engine from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.orm import scoped_session, sessionmaker
import os
import docker import docker
''' Global constants '''
NOPAQUE_STORAGE = os.environ.get('NOPAQUE_STORAGE')
''' Docker client ''' config = Config()
config.init_app()
docker_client = docker.from_env() docker_client = docker.from_env()
docker_client.login(password=os.environ.get('GITLAB_PASSWORD'), engine = create_engine(config.SQLALCHEMY_DATABASE_URI)
registry="gitlab.ub.uni-bielefeld.de:4567", Session = scoped_session(sessionmaker(bind=engine))
username=os.environ.get('GITLAB_USERNAME'))
''' Scoped session '''
engine = create_engine(
'postgresql://{}:{}@db/{}'.format(
os.environ.get('POSTGRES_USER'),
os.environ.get('POSTGRES_PASSWORD'),
os.environ.get('POSTGRES_DB_NAME')))
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)

View File

@ -1,16 +1,16 @@
from logger.logger import init_logger from . import config, docker_client, Session
from tasks import Session, docker_client, NOPAQUE_STORAGE from .decorators import background
from tasks.decorators import background from .models import Corpus
from tasks.Models import Corpus
import docker import docker
import logging
import os import os
import shutil import shutil
@background @background
def check_corpora(): def check_corpora():
c_session = Session() session = Session()
corpora = c_session.query(Corpus).all() corpora = session.query(Corpus).all()
for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora):
__create_build_corpus_service(corpus) __create_build_corpus_service(corpus)
for corpus in filter(lambda corpus: (corpus.status == 'queued' for corpus in filter(lambda corpus: (corpus.status == 'queued'
@ -23,13 +23,15 @@ def check_corpora():
for corpus in filter(lambda corpus: corpus.status == 'stop analysis', for corpus in filter(lambda corpus: corpus.status == 'stop analysis',
corpora): corpora):
__remove_cqpserver_container(corpus) __remove_cqpserver_container(corpus)
c_session.commit() session.commit()
Session.remove() Session.remove()
def __create_build_corpus_service(corpus): def __create_build_corpus_service(corpus):
corpus_dir = os.path.join(NOPAQUE_STORAGE, str(corpus.user_id), corpus_dir = os.path.join(config.DATA_DIR,
'corpora', str(corpus.id)) str(corpus.user_id),
'corpora',
str(corpus.id))
corpus_data_dir = os.path.join(corpus_dir, 'data') corpus_data_dir = os.path.join(corpus_dir, 'data')
corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt') corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt')
corpus_registry_dir = os.path.join(corpus_dir, 'registry') corpus_registry_dir = os.path.join(corpus_dir, 'registry')
@ -49,7 +51,8 @@ def __create_build_corpus_service(corpus):
corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
'name': 'build-corpus_{}'.format(corpus.id), 'name': 'build-corpus_{}'.format(corpus.id),
'restart_policy': docker.types.RestartPolicy()} 'restart_policy': docker.types.RestartPolicy()}
service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest') service_image = \
'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
try: try:
service = docker_client.services.get(service_args['name']) service = docker_client.services.get(service_args['name'])
except docker.errors.NotFound: except docker.errors.NotFound:
@ -67,12 +70,11 @@ def __create_build_corpus_service(corpus):
def __checkout_build_corpus_service(corpus): def __checkout_build_corpus_service(corpus):
logger = init_logger()
service_name = 'build-corpus_{}'.format(corpus.id) service_name = 'build-corpus_{}'.format(corpus.id)
try: try:
service = docker_client.services.get(service_name) service = docker_client.services.get(service_name)
except docker.errors.NotFound: except docker.errors.NotFound:
logger.error('__checkout_build_corpus_service({}):'.format(corpus.id) logging.error('__checkout_build_corpus_service({}):'.format(corpus.id)
+ ' The service does not exist.' + ' The service does not exist.'
+ ' (stauts: {} -> failed)'.format(corpus.status)) + ' (stauts: {} -> failed)'.format(corpus.status))
corpus.status = 'failed' corpus.status = 'failed'
@ -94,8 +96,10 @@ def __checkout_build_corpus_service(corpus):
def __create_cqpserver_container(corpus): def __create_cqpserver_container(corpus):
corpus_dir = os.path.join(NOPAQUE_STORAGE, str(corpus.user_id), corpus_dir = os.path.join(config.DATA_DIR,
'corpora', str(corpus.id)) str(corpus.user_id),
'corpora',
str(corpus.id))
corpus_data_dir = os.path.join(corpus_dir, 'data') corpus_data_dir = os.path.join(corpus_dir, 'data')
corpus_registry_dir = os.path.join(corpus_dir, 'registry') corpus_registry_dir = os.path.join(corpus_dir, 'registry')
container_args = {'command': 'cqpserver', container_args = {'command': 'cqpserver',
@ -104,7 +108,8 @@ def __create_cqpserver_container(corpus):
corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
'name': 'cqpserver_{}'.format(corpus.id), 'name': 'cqpserver_{}'.format(corpus.id),
'network': 'nopaque_default'} 'network': 'nopaque_default'}
container_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest') container_image = \
'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
try: try:
container = docker_client.containers.get(container_args['name']) container = docker_client.containers.get(container_args['name'])
except docker.errors.NotFound: except docker.errors.NotFound:

View File

@ -1,46 +1,42 @@
from datetime import datetime from datetime import datetime
from logger.logger import init_logger from . import config, docker_client, Session
from tasks import Session, docker_client, NOPAQUE_STORAGE from .decorators import background
from tasks.decorators import background from .models import Job, JobResult, NotificationData, NotificationEmailData
from tasks.Models import Job, NotificationData, NotificationEmailData, JobResult
import docker import docker
import logging
import json import json
import os import os
@background @background
def check_jobs(): def check_jobs():
# logger = init_logger() session = Session()
cj_session = Session() jobs = session.query(Job).all()
jobs = cj_session.query(Job).all()
for job in filter(lambda job: job.status == 'submitted', jobs): for job in filter(lambda job: job.status == 'submitted', jobs):
__create_job_service(job) __create_job_service(job)
for job in filter(lambda job: (job.status == 'queued'), jobs): for job in filter(lambda job: job.status == 'queued', jobs):
__checkout_job_service(job, cj_session) __checkout_job_service(job, session)
__add_notification_data(job, 'queued', cj_session) __add_notification_data(job, 'queued', session)
for job in filter(lambda job: (job.status == 'running'), jobs): for job in filter(lambda job: job.status == 'running', jobs):
__checkout_job_service(job, cj_session) __checkout_job_service(job, session)
__add_notification_data(job, 'running', cj_session) __add_notification_data(job, 'running', session)
for job in filter(lambda job: job.status == 'complete', jobs): for job in filter(lambda job: job.status == 'complete', jobs):
__add_notification_data(job, 'complete', cj_session) __add_notification_data(job, 'complete', session)
for job in filter(lambda job: job.status == 'failed', jobs): for job in filter(lambda job: job.status == 'failed', jobs):
__add_notification_data(job, 'failed', cj_session) __add_notification_data(job, 'failed', session)
for job in filter(lambda job: job.status == 'canceling', jobs): for job in filter(lambda job: job.status == 'canceling', jobs):
__remove_job_service(job) __remove_job_service(job)
cj_session.commit() session.commit()
Session.remove() Session.remove()
def __add_notification_data(job, notified_on_status, scoped_session): def __add_notification_data(job, notified_on_status, session):
logger = init_logger()
# checks if user wants any notifications at all # checks if user wants any notifications at all
if (job.user.setting_job_status_mail_notifications == 'none'): if (job.user.setting_job_status_mail_notifications == 'none'):
# logger.warning('User does not want any notifications!')
return return
# checks if user wants only notification on completed jobs # checks if user wants only notification on completed jobs
elif (job.user.setting_job_status_mail_notifications == 'end' elif (job.user.setting_job_status_mail_notifications == 'end'
and notified_on_status != 'complete'): and notified_on_status != 'complete'):
# logger.warning('User only wants notifications on job completed!')
return return
else: else:
# check if a job already has associated NotificationData # check if a job already has associated NotificationData
@ -48,27 +44,21 @@ def __add_notification_data(job, notified_on_status, scoped_session):
# create notification_data for current job if there is none # create notification_data for current job if there is none
if (notification_exists == 0): if (notification_exists == 0):
notification_data = NotificationData(job_id=job.id) notification_data = NotificationData(job_id=job.id)
scoped_session.add(notification_data) session.add(notification_data)
scoped_session.commit()
# If no commit job will have no NotificationData # If no commit job will have no NotificationData
# logger.warning('Created NotificationData for current Job.')) session.commit()
else:
pass
# logger.warning('Job already had notification: {}'.format(notification_exists))
if (job.notification_data[0].notified_on != notified_on_status): if (job.notification_data[0].notified_on != notified_on_status):
notification_email_data = NotificationEmailData(job_id=job.id) notification_email_data = NotificationEmailData(job_id=job.id)
notification_email_data.notify_status = notified_on_status notification_email_data.notify_status = notified_on_status
notification_email_data.creation_date = datetime.utcnow() notification_email_data.creation_date = datetime.utcnow()
job.notification_data[0].notified_on = notified_on_status job.notification_data[0].notified_on = notified_on_status
scoped_session.add(notification_email_data) session.add(notification_email_data)
logger.warning('Created NotificationEmailData for current Job.')
else:
# logger.warning('NotificationEmailData has already been created for current Job!')
pass
def __create_job_service(job): def __create_job_service(job):
job_dir = os.path.join(NOPAQUE_STORAGE, str(job.user_id), 'jobs', job_dir = os.path.join(config.DATA_DIR,
str(job.user_id),
'jobs',
str(job.id)) str(job.id))
cmd = '{} -i /files -o /files/output'.format(job.service) cmd = '{} -i /files -o /files/output'.format(job.service)
if job.service == 'file-setup': if job.service == 'file-setup':
@ -105,15 +95,14 @@ def __create_job_service(job):
job.status = 'queued' job.status = 'queued'
def __checkout_job_service(job, scoped_session): def __checkout_job_service(job, session):
logger = init_logger()
service_name = 'job_{}'.format(job.id) service_name = 'job_{}'.format(job.id)
try: try:
service = docker_client.services.get(service_name) service = docker_client.services.get(service_name)
except docker.errors.NotFound: except docker.errors.NotFound:
logger.error('__checkout_job_service({}):'.format(job.id) logging.error('__checkout_job_service({}): '.format(job.id)
+ 'The service does not exist. ' + 'The service does not exist. '
+ ' (stauts: {} -> failed)'.format(job.status)) + '(status: {} -> failed)'.format(job.status))
job.status = 'failed' job.status = 'failed'
return return
except docker.errors.DockerException: except docker.errors.DockerException:
@ -130,14 +119,18 @@ def __checkout_job_service(job, scoped_session):
job.end_date = datetime.utcnow() job.end_date = datetime.utcnow()
job.status = task_state job.status = task_state
if task_state == 'complete': if task_state == 'complete':
results_dir = os.path.join(NOPAQUE_STORAGE, str(job.user_id), results_dir = os.path.join(config.DATA_DIR,
'jobs', str(job.id), 'output') str(job.user_id),
'jobs',
str(job.id),
'output')
results = filter(lambda x: x.endswith('.zip'), results = filter(lambda x: x.endswith('.zip'),
os.listdir(results_dir)) os.listdir(results_dir))
for result in results: for result in results:
job_result = JobResult(dir=results_dir, filename=result, job_result = JobResult(dir=results_dir,
filename=result,
job_id=job.id) job_id=job.id)
scoped_session.add(job_result) session.add(job_result)
def __remove_job_service(job): def __remove_job_service(job):

View File

@ -1,29 +1,44 @@
from notify.notification import Notification from notify.notification import Notification
from notify.service import NotificationService from notify.service import NotificationService
from sqlalchemy import asc from sqlalchemy import asc
from tasks import Session from . import config, Session
from tasks.decorators import background from .decorators import background
from tasks.Models import NotificationEmailData from .models import NotificationEmailData
import os import logging
import smtplib
@background @background
def notify(execute_flag): def notify():
# If True mails are sent normaly session = Session()
# If False mails are not sent. Used to avoid sending mails for jobs that if config.SMTP_USE_SSL:
# have been completed a long time ago. Use this if you implement notify smtp = smtplib.SMTP_SSL(host=config.SMTP_SERVER, port=config.SMTP_PORT)
# into an already existing nopaque instance. Change it to True after the else:
# daemon has run one time with the flag set to False. smtp = smtplib.SMTP(host=config.SMTP_SERVER, port=config.SMTP_PORT)
# Initialize notification service if config.SMTP_USE_TLS:
notification_service = NotificationService(execute_flag) smtp.starttls()
notification_service.get_smtp_configs() try:
notification_service.set_server() smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
except smtplib.SMTPHeloError:
logging.warning('The server didnt reply properly to the HELO '
'greeting.')
return
except smtplib.SMTPAuthenticationError as e:
logging.warning('The server didnt accept the username/password '
'combination.')
logging.warning(e)
return
except smtplib.SMTPNotSupportedError:
logging.warning('The AUTH command is not supported by the server.')
return
except smtplib.SMTPException:
logging.warning('No suitable authentication method was found.')
return
notification_service = NotificationService(smtp)
# create notifications (content, recipient etc.) # create notifications (content, recipient etc.)
notifications = __create_mail_notifications(notification_service) notifications = __create_mail_notifications(notification_service, session)
# only login and send mails if there are any notifications # only login and send mails if there are any notifications
if (len(notifications) > 0): if (len(notifications) > 0):
try:
notification_service.login()
# combine new and unsent notifications # combine new and unsent notifications
notifications.update(notification_service.not_sent) notifications.update(notification_service.not_sent)
# send all notifications # send all notifications
@ -32,27 +47,25 @@ def notify(execute_flag):
# but only if mail limit has not been exceeded # but only if mail limit has not been exceeded
if (notification_service.mail_limit_exceeded is not True): if (notification_service.mail_limit_exceeded is not True):
notification_service.not_sent = {} notification_service.not_sent = {}
notification_service.quit() smtp.quit()
except Exception as e: Session.remove()
notification_service.not_sent.update(notifications)
notification_service.quit()
# Email notification functions # Email notification functions
def __create_mail_notifications(notification_service): def __create_mail_notifications(notification_service, session):
mn_session = Session() notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all() # noqa
notification_email_data = mn_session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all()
notifications = {} notifications = {}
for data in notification_email_data: for data in notification_email_data:
notification = Notification() notification = Notification()
notification.set_addresses(notification_service.email_address, notification.set_addresses(config.SMTP_DEFAULT_SENDER,
data.job.user.email) data.job.user.email)
subject_template = '[nopaque] Status update for your Job/Corpora: {title}!' subject_template = ('[nopaque] Status update for your Job/Corpora: '
'{title}!')
subject_template_values_dict = {'title': data.job.title} subject_template_values_dict = {'title': data.job.title}
protocol = os.environ.get('NOPAQUE_PROTOCOL') url = '{}://{}/{}/{}'.format(config.PROTOCOL,
domain = os.environ.get('NOPAQUE_DOMAIN') config.DOMAIN,
url = '{protocol}://{domain}/{jobs}/{id}'.format( 'jobs',
protocol=protocol, domain=domain, jobs='jobs', id=data.job.id) data.job.id)
body_template_values_dict = {'username': data.job.user.username, body_template_values_dict = {'username': data.job.user.username,
'id': data.job.id, 'id': data.job.id,
'title': data.job.title, 'title': data.job.title,
@ -72,9 +85,8 @@ def __create_mail_notifications(notification_service):
# get swamped with mails for queued, running and complete if those # get swamped with mails for queued, running and complete if those
# happen in in a few seconds. Only the last update will be sent. # happen in in a few seconds. Only the last update will be sent.
# This depends on the sleep time interval though. # This depends on the sleep time interval though.
mn_session.delete(data) session.delete(data)
mn_session.commit() session.commit()
Session.remove()
return notifications return notifications
@ -83,8 +95,10 @@ def __send_mail_notifications(notifications, notification_service):
try: try:
notification_service.send(notification) notification_service.send(notification)
notification_service.mail_limit_exceeded = False notification_service.mail_limit_exceeded = False
except Exception as e: except Exception:
# Adds notifications to unsent if mail server exceded limit for # Adds notifications to unsent if mail server exceded limit for
# consecutive mail sending # consecutive mail sending
logging.warning('limit')
notification_service.not_sent[key] = notification notification_service.not_sent[key] = notification
notification_service.mail_limit_exceeded = True notification_service.mail_limit_exceeded = True
notification_service.not_sent.update(notifications)

View File

@ -0,0 +1,25 @@
version: "3.5"
services:
nopaque:
ports:
- "5000:5000"
volumes:
# Mount code as volumes
- "./web/app:/home/nopaque/app"
- "./web/boot.sh:/home/nopaque/boot.sh"
- "./web/config.py:/home/nopaque/config.py"
- "./web/migrations:/home/nopaque/migrations"
- "./web/nopaque.py:/home/nopaque/nopaque.py"
- "./web/requirements.txt:/home/nopaque/requirements.txt"
- "./web/tests:/home/nopaque/tests"
nopaqued:
volumes:
# Mount code as volumes
- "./daemon/boot.sh:/home/nopaqued/boot.sh"
- "./daemon/config.py:/home/nopaqued/config.py"
- "./daemon/logger:/home/nopaqued/logger"
- "./daemon/nopaqued.py:/home/nopaqued/nopaqued.py"
- "./daemon/notify:/home/nopaqued/notify"
- "./daemon/requirements.txt:/home/nopaqued/requirements.txt"
- "./daemon/tasks:/home/nopaqued/tasks"

View File

@ -1,51 +0,0 @@
version: "3.5"
networks:
reverse-proxy:
external:
name: reverse-proxy
services:
web:
labels:
- "traefik.docker.network=reverse-proxy"
- "traefik.enable=true"
### <http> ###
- "traefik.http.middlewares.nopaque-header.headers.customrequestheaders.X-Forwarded-Proto=http"
- "traefik.http.routers.nopaque.entrypoints=web"
- "traefik.http.routers.nopaque.middlewares=nopaque-header, redirect-to-https@file"
- "traefik.http.routers.nopaque.rule=Host(`${NOPAQUE_DOMAIN}`)"
### </http> ###
### <https> ###
- "traefik.http.middlewares.nopaque-secure-header.headers.customrequestheaders.X-Forwarded-Proto=https"
- "traefik.http.routers.nopaque-secure.entrypoints=web-secure"
- "traefik.http.routers.nopaque-secure.middlewares=hsts-header@file, nopaque-secure-header"
- "traefik.http.routers.nopaque-secure.rule=Host(`${NOPAQUE_DOMAIN}`)"
- "traefik.http.routers.nopaque-secure.tls.options=intermediate@file"
### </https> ###
### <basicauth help="https://docs.traefik.io/middlewares/basicauth/"> ###
# - "traefik.http.middlewares.nopaque-basicauth.basicauth.users=<USERNAME>:<PASSWORD>"
# - "traefik.http.routers.nopaque.middlewares=nopaque-basicauth, nopaque-header, redirect-to-https@file"
# - "traefik.http.routers.nopaque-secure.middlewares=nopaque-basicauth, hsts-header@file, nopaque-secure-header"
### </basicauth> ###
networks:
- default
- reverse-proxy
volumes:
# Mount code as volumes
- "./web/app:/home/nopaque/app"
- "./web/migrations:/home/nopaque/migrations"
- "./web/tests:/home/nopaque/tests"
- "./web/config.py:/home/nopaque/config.py"
- "./web/docker-entrypoint.sh:/home/nopaque/docker-entrypoint.sh"
- "./web/nopaque.py:/home/nopaque/nopaque.py"
- "./web/requirements.txt:/home/nopaque/requirements.txt"
daemon:
volumes:
# Mount code as volumes
- "./daemon/logger:/home/nopaqued/logger"
- "./daemon/notify:/home/nopaqued/notify"
- "./daemon/tasks:/home/nopaqued/tasks"
- "./daemon/docker-entrypoint.sh:/home/nopaqued/docker-entrypoint.sh"
- "./daemon/nopaqued.py:/home/nopaqued/nopaqued.py"
- "./daemon/requirements.txt:/home/nopaqued/requirements.txt"

View File

@ -0,0 +1,30 @@
################################################################################
# Don't forget to set the NOPAQUE_NUM_PROXIES variable in your .env #
################################################################################
version: "3.5"
networks:
reverse-proxy:
external:
name: reverse-proxy
services:
nopaque:
labels:
- "traefik.docker.network=reverse-proxy"
- "traefik.enable=true"
### <http> ###
- "traefik.http.routers.nopaque.entrypoints=web"
- "traefik.http.routers.nopaque.middlewares=redirect-to-https@file"
- "traefik.http.routers.nopaque.rule=Host(`${NOPAQUE_DOMAIN:-localhost}`)"
### </http> ###
### <https> ###
- "traefik.http.routers.nopaque-secure.entrypoints=web-secure"
- "traefik.http.routers.nopaque-secure.middlewares=hsts-header@file"
- "traefik.http.routers.nopaque-secure.rule=Host(`${NOPAQUE_DOMAIN:-localhost}`)"
- "traefik.http.routers.nopaque-secure.tls.certresolver=<CERTRESOLVER>"
- "traefik.http.routers.nopaque-secure.tls.options=intermediate@file"
### </https> ###
networks:
- default
- reverse-proxy

View File

@ -1,49 +1,49 @@
version: "3.5" version: "3.5"
volumes:
redis-trash1:
services: services:
web: nopaque:
build: build:
args: args:
GID: ${GID} GID: ${HOST_GID}
UID: ${UID} UID: ${HOST_UID}
context: ./web context: ./web
depends_on: depends_on:
- db - db
- redis - mq
env_file: .env env_file: .env
image: nopaque/web image: nopaque/web
restart: unless-stopped restart: unless-stopped
volumes: volumes:
- "./logs:/home/nopaque/logs" - "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}"
- "${NOPAQUE_STORAGE}:${NOPAQUE_STORAGE}" - "${HOST_NOPAQUE_LOG_FILE-./nopaque.log}:${NOPAQUE_LOG_FILE:-/home/nopaque/nopaque.log}"
daemon: nopaqued:
build: build:
args: args:
DOCKER_GID: ${DOCKER_GID} DOCKER_GID: ${HOST_DOCKER_GID}
GID: ${GID} GID: ${HOST_GID}
UID: ${UID} UID: ${HOST_UID}
context: ./daemon context: ./daemon
depends_on: depends_on:
- db - db
- web - nopaque
env_file: .env env_file: .env
image: nopaque/daemon image: nopaque/daemon
restart: unless-stopped restart: unless-stopped
volumes: volumes:
- "/var/run/docker.sock:/var/run/docker.sock" - "/var/run/docker.sock:/var/run/docker.sock"
- "./logs:/home/nopaqued/logs" - "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}"
- "${NOPAQUE_STORAGE}:${NOPAQUE_STORAGE}" - "${HOST_NOPAQUE_DAEMON_LOG_FILE-./nopaqued.log}:${NOPAQUE_DAEMON_LOG_FILE:-/home/nopaqued/nopaqued.log}"
db: db:
env_file: .env environment:
- POSTGRES_DB_NAME=${NOPAQUE_DB_NAME}
- POSTGRES_USER=${NOPAQUE_DB_USERNAME}
- POSTGRES_PASSWORD=${NOPAQUE_DB_PASSWORD}
image: postgres:11 image: postgres:11
restart: unless-stopped restart: unless-stopped
volumes: volumes:
- "/srv/nopaque/db:/var/lib/postgresql/data" - "${HOST_DB_DIR:-./db}:/var/lib/postgresql/data"
redis: mq:
image: redis:6 image: redis:6
restart: unless-stopped restart: unless-stopped
volumes: volumes:
- "redis-trash1:/data" - "${HOST_MQ_DIR:-./mq}:/data"

View File

View File

@ -1,3 +1,6 @@
# Docker related files
Dockerfile Dockerfile
.dockerignore .dockerignore
*.bak
# Packages
__pycache__

View File

@ -1,7 +1,7 @@
FROM python:3.6-slim-stretch FROM python:3.6-slim-stretch
LABEL maintainer="inf_sfb1288@lists.uni-bielefeld.de" LABEL authors="Patrick Jentsch <p.jentsch@uni-bielefeld.de>, Stephan Porada <sporada@uni-bielefeld.de>"
ARG UID ARG UID
@ -18,7 +18,7 @@ RUN apt-get update \
build-essential \ build-essential \
libpq-dev \ libpq-dev \
wait-for-it \ wait-for-it \
&& rm -rf /var/lib/apt/lists/* && rm -r /var/lib/apt/lists/*
RUN groupadd --gid ${GID} --system nopaque \ RUN groupadd --gid ${GID} --system nopaque \
@ -33,4 +33,4 @@ RUN python -m venv venv \
&& mkdir logs && mkdir logs
ENTRYPOINT ["./docker-entrypoint.sh"] ENTRYPOINT ["./boot.sh"]

View File

@ -1,15 +1,14 @@
from config import config from config import Config
from flask import Flask from flask import Flask
from flask_login import LoginManager from flask_login import LoginManager
from flask_mail import Mail from flask_mail import Mail
from flask_paranoid import Paranoid from flask_paranoid import Paranoid
from flask_socketio import SocketIO from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy from flask_sqlalchemy import SQLAlchemy
import logging
config = Config()
db = SQLAlchemy() db = SQLAlchemy()
logger = logging.getLogger(__name__)
login_manager = LoginManager() login_manager = LoginManager()
login_manager.login_view = 'auth.login' login_manager.login_view = 'auth.login'
mail = Mail() mail = Mail()
@ -18,44 +17,36 @@ paranoid.redirect_view = '/'
socketio = SocketIO() socketio = SocketIO()
def create_app(config_name): def create_app():
app = Flask(__name__) app = Flask(__name__)
app.config.from_object(config[config_name]) app.config.from_object(config)
config[config_name].init_app(app) config.init_app(app)
db.init_app(app) db.init_app(app)
login_manager.init_app(app) login_manager.init_app(app)
mail.init_app(app) mail.init_app(app)
paranoid.init_app(app) paranoid.init_app(app)
socketio.init_app(app, message_queue='redis://redis:6379/') socketio.init_app(app, message_queue=config.SOCKETIO_MESSAGE_QUEUE_URI)
from . import events from . import events
from .admin import admin as admin_blueprint from .admin import admin as admin_blueprint
app.register_blueprint(admin_blueprint, url_prefix='/admin') app.register_blueprint(admin_blueprint, url_prefix='/admin')
from .auth import auth as auth_blueprint from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth') app.register_blueprint(auth_blueprint, url_prefix='/auth')
from .content import content as content_blueprint from .content import content as content_blueprint
app.register_blueprint(content_blueprint, url_prefix='/content') app.register_blueprint(content_blueprint, url_prefix='/content')
from .corpora import corpora as corpora_blueprint from .corpora import corpora as corpora_blueprint
app.register_blueprint(corpora_blueprint, url_prefix='/corpora') app.register_blueprint(corpora_blueprint, url_prefix='/corpora')
from .jobs import jobs as jobs_blueprint from .jobs import jobs as jobs_blueprint
app.register_blueprint(jobs_blueprint, url_prefix='/jobs') app.register_blueprint(jobs_blueprint, url_prefix='/jobs')
from .main import main as main_blueprint from .main import main as main_blueprint
app.register_blueprint(main_blueprint) app.register_blueprint(main_blueprint)
from .profile import profile as profile_blueprint from .profile import profile as profile_blueprint
app.register_blueprint(profile_blueprint, url_prefix='/profile') app.register_blueprint(profile_blueprint, url_prefix='/profile')
from .query_results import query_results as query_results_blueprint from .query_results import query_results as query_results_blueprint
app.register_blueprint(query_results_blueprint, app.register_blueprint(query_results_blueprint,
url_prefix='/query_results') url_prefix='/query_results')
from .services import services as services_blueprint from .services import services as services_blueprint
app.register_blueprint(services_blueprint, url_prefix='/services') app.register_blueprint(services_blueprint, url_prefix='/services')

View File

@ -65,7 +65,7 @@ def register():
username=registration_form.username.data) username=registration_form.username.data)
db.session.add(user) db.session.add(user)
db.session.commit() db.session.commit()
user_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], user_dir = os.path.join(current_app.config['DATA_DIR'],
str(user.id)) str(user.id))
if os.path.exists(user_dir): if os.path.exists(user_dir):
shutil.rmtree(user_dir) shutil.rmtree(user_dir)

View File

@ -9,8 +9,6 @@ import cqi
import math import math
from datetime import datetime from datetime import datetime
import time
from app import logger
''' '''
' A dictionary containing lists of, with corpus ids associated, Socket.IO ' A dictionary containing lists of, with corpus ids associated, Socket.IO
@ -41,7 +39,8 @@ def corpus_analysis_get_meta_data(corpus_id):
metadata['corpus_name'] = db_corpus.title metadata['corpus_name'] = db_corpus.title
metadata['corpus_description'] = db_corpus.description metadata['corpus_description'] = db_corpus.description
metadata['corpus_creation_date'] = db_corpus.creation_date.isoformat() metadata['corpus_creation_date'] = db_corpus.creation_date.isoformat()
metadata['corpus_last_edited_date'] = db_corpus.last_edited_date.isoformat() metadata['corpus_last_edited_date'] = \
db_corpus.last_edited_date.isoformat()
client = corpus_analysis_clients.get(request.sid) client = corpus_analysis_clients.get(request.sid)
if client is None: if client is None:
response = {'code': 424, 'desc': 'No client found for this session', response = {'code': 424, 'desc': 'No client found for this session',
@ -61,18 +60,20 @@ def corpus_analysis_get_meta_data(corpus_id):
metadata['corpus_size_tokens'] = client_corpus.attrs['size'] metadata['corpus_size_tokens'] = client_corpus.attrs['size']
text_attr = client_corpus.structural_attributes.get('text') text_attr = client_corpus.structural_attributes.get('text')
struct_attrs = client_corpus.structural_attributes.list(filters={'part_of': text_attr}) struct_attrs = client_corpus.structural_attributes.list(
filters={'part_of': text_attr})
text_ids = range(0, (text_attr.attrs['size'])) text_ids = range(0, (text_attr.attrs['size']))
texts_metadata = {} texts_metadata = {}
for text_id in text_ids: for text_id in text_ids:
texts_metadata[text_id] = {} texts_metadata[text_id] = {}
for struct_attr in struct_attrs: for struct_attr in struct_attrs:
texts_metadata[text_id][struct_attr.attrs['name'][(len(text_attr.attrs['name']) + 1):]] = struct_attr.values_by_ids(list(range(struct_attr.attrs['size'])))[text_id] texts_metadata[text_id][struct_attr.attrs['name'][(len(text_attr.attrs['name']) + 1):]] = struct_attr.values_by_ids(list(range(struct_attr.attrs['size'])))[text_id] # noqa
metadata['corpus_all_texts'] = texts_metadata metadata['corpus_all_texts'] = texts_metadata
metadata['corpus_analysis_date'] = datetime.utcnow().isoformat() metadata['corpus_analysis_date'] = datetime.utcnow().isoformat()
metadata['corpus_cqi_py_protocol_version'] = client.api.version metadata['corpus_cqi_py_protocol_version'] = client.api.version
metadata['corpus_cqi_py_package_version'] = cqi.__version__ metadata['corpus_cqi_py_package_version'] = cqi.__version__
metadata['corpus_cqpserver_version'] = 'CQPserver v3.4.22' # TODO: make this dynamically # TODO: make this dynamically
metadata['corpus_cqpserver_version'] = 'CQPserver v3.4.22'
# write some metadata to the db # write some metadata to the db
db_corpus.current_nr_of_tokens = metadata['corpus_size_tokens'] db_corpus.current_nr_of_tokens = metadata['corpus_size_tokens']
@ -133,7 +134,7 @@ def corpus_analysis_query(query):
if (results.attrs['size'] == 0): if (results.attrs['size'] == 0):
progress = 100 progress = 100
else: else:
progress = ((chunk_start + chunk_size) / results.attrs['size']) * 100 progress = ((chunk_start + chunk_size) / results.attrs['size']) * 100 # noqa
progress = min(100, int(math.ceil(progress))) progress = min(100, int(math.ceil(progress)))
response = {'code': 200, 'desc': None, 'msg': 'OK', response = {'code': 200, 'desc': None, 'msg': 'OK',
'payload': {'chunk': chunk, 'progress': progress}} 'payload': {'chunk': chunk, 'progress': progress}}
@ -202,7 +203,9 @@ def corpus_analysis_get_match_with_full_context(payload):
'payload': payload, 'payload': payload,
'type': type, 'type': type,
'data_indexes': data_indexes} 'data_indexes': data_indexes}
socketio.emit('corpus_analysis_get_match_with_full_context', response, room=request.sid) socketio.emit('corpus_analysis_get_match_with_full_context',
response,
room=request.sid)
client.status = 'ready' client.status = 'ready'

View File

@ -21,7 +21,7 @@ def add_corpus():
status='unprepared', title=add_corpus_form.title.data) status='unprepared', title=add_corpus_form.title.data)
db.session.add(corpus) db.session.add(corpus)
db.session.commit() db.session.commit()
dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], dir = os.path.join(current_app.config['DATA_DIR'],
str(corpus.user_id), 'corpora', str(corpus.id)) str(corpus.user_id), 'corpora', str(corpus.id))
try: try:
os.makedirs(dir) os.makedirs(dir)
@ -109,7 +109,7 @@ def add_corpus_file(corpus_id):
# Save the file # Save the file
dir = os.path.join(str(corpus.user_id), 'corpora', str(corpus.id)) dir = os.path.join(str(corpus.user_id), 'corpora', str(corpus.id))
add_corpus_file_form.file.data.save( add_corpus_file_form.file.data.save(
os.path.join(current_app.config['NOPAQUE_STORAGE'], dir, os.path.join(current_app.config['DATA_DIR'], dir,
add_corpus_file_form.file.data.filename)) add_corpus_file_form.file.data.filename))
corpus_file = CorpusFile( corpus_file = CorpusFile(
address=add_corpus_file_form.address.data, address=add_corpus_file_form.address.data,
@ -163,7 +163,7 @@ def download_corpus_file(corpus_id, corpus_file_id):
if not (corpus_file.corpus.creator == current_user if not (corpus_file.corpus.creator == current_user
or current_user.is_administrator()): or current_user.is_administrator()):
abort(403) abort(403)
dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], dir = os.path.join(current_app.config['DATA_DIR'],
corpus_file.dir) corpus_file.dir)
return send_from_directory(as_attachment=True, directory=dir, return send_from_directory(as_attachment=True, directory=dir,
filename=corpus_file.filename) filename=corpus_file.filename)

View File

@ -1,15 +1,11 @@
from flask import current_app, render_template from flask import render_template
from flask_mail import Message from flask_mail import Message
from . import mail from . import mail
from .decorators import background from .decorators import background
def create_message(recipient, subject, template, **kwargs): def create_message(recipient, subject, template, **kwargs):
app = current_app._get_current_object() msg = Message('[nopaque] {}'.format(subject), recipients=[recipient])
sender = app.config['NOPAQUE_MAIL_SENDER']
subject_prefix = app.config['NOPAQUE_MAIL_SUBJECT_PREFIX']
msg = Message('{} {}'.format(subject_prefix, subject),
recipients=[recipient], sender=sender)
msg.body = render_template('{}.txt.j2'.format(template), **kwargs) msg.body = render_template('{}.txt.j2'.format(template), **kwargs)
msg.html = render_template('{}.html.j2'.format(template), **kwargs) msg.html = render_template('{}.html.j2'.format(template), **kwargs)
return msg return msg

View File

@ -44,7 +44,7 @@ def download_job_input(job_id, job_input_id):
if not (job_input.job.creator == current_user if not (job_input.job.creator == current_user
or current_user.is_administrator()): or current_user.is_administrator()):
abort(403) abort(403)
dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], dir = os.path.join(current_app.config['DATA_DIR'],
job_input.dir) job_input.dir)
return send_from_directory(as_attachment=True, directory=dir, return send_from_directory(as_attachment=True, directory=dir,
filename=job_input.filename) filename=job_input.filename)
@ -72,7 +72,7 @@ def download_job_result(job_id, job_result_id):
if not (job_result.job.creator == current_user if not (job_result.job.creator == current_user
or current_user.is_administrator()): or current_user.is_administrator()):
abort(403) abort(403)
dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], dir = os.path.join(current_app.config['DATA_DIR'],
job_result.dir) job_result.dir)
return send_from_directory(as_attachment=True, directory=dir, return send_from_directory(as_attachment=True, directory=dir,
filename=job_result.filename) filename=job_result.filename)

View File

@ -1,12 +0,0 @@
from flask_wtf import FlaskForm
from wtforms import DecimalField, StringField, SubmitField, TextAreaField
from wtforms.validators import DataRequired, Email, Length, NumberRange


class FeedbackForm(FlaskForm):
    """Collect user feedback: a contact email address, free-text feedback
    and a 1-10 rating of nopaque.
    """

    # Sender address; required and must be syntactically valid.
    email = StringField('Email', validators=[DataRequired(), Email()])
    # Optional free-text feedback, capped at 255 characters.
    feedback = TextAreaField('Feedback', validators=[Length(0, 255)])
    # Overall rating, constrained to the range 1-10.
    like_range = DecimalField('How would you rate nopaque?',
                              validators=[DataRequired(),
                                          NumberRange(min=1, max=10)])
    submit = SubmitField('Send feedback')

View File

@ -1,8 +1,6 @@
from flask import flash, redirect, render_template, url_for from flask import flash, redirect, render_template, url_for
from flask_login import login_required, login_user from flask_login import login_required, login_user
from . import main from . import main
from .forms import FeedbackForm
from .. import logger
from ..auth.forms import LoginForm from ..auth.forms import LoginForm
from ..models import User from ..models import User
@ -28,18 +26,6 @@ def dashboard():
return render_template('main/dashboard.html.j2', title='Dashboard') return render_template('main/dashboard.html.j2', title='Dashboard')
@main.route('/feedback', methods=['GET', 'POST'])
@login_required
def feedback():
feedback_form = FeedbackForm(prefix='feedback-form')
if feedback_form.validate_on_submit():
logger.warning(feedback_form.email)
logger.warning(feedback_form.feedback)
logger.warning(feedback_form.like_range)
return render_template('main/feedback.html.j2',
feedback_form=feedback_form, title='Feedback')
@main.route('/poster', methods=['GET', 'POST']) @main.route('/poster', methods=['GET', 'POST'])
def poster(): def poster():
login_form = LoginForm(prefix='login-form') login_form = LoginForm(prefix='login-form')

View File

@ -166,7 +166,7 @@ class User(UserMixin, db.Model):
def __init__(self, **kwargs): def __init__(self, **kwargs):
super(User, self).__init__(**kwargs) super(User, self).__init__(**kwargs)
if self.role is None: if self.role is None:
if self.email == current_app.config['NOPAQUE_ADMIN']: if self.email == current_app.config['ADMIN_EMAIL_ADRESS']:
self.role = Role.query.filter_by(name='Administrator').first() self.role = Role.query.filter_by(name='Administrator').first()
if self.role is None: if self.role is None:
self.role = Role.query.filter_by(default=True).first() self.role = Role.query.filter_by(default=True).first()
@ -251,7 +251,7 @@ class User(UserMixin, db.Model):
''' '''
Delete the user and its corpora and jobs from database and filesystem. Delete the user and its corpora and jobs from database and filesystem.
''' '''
user_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], user_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.id)) str(self.id))
shutil.rmtree(user_dir, ignore_errors=True) shutil.rmtree(user_dir, ignore_errors=True)
db.session.delete(self) db.session.delete(self)
@ -383,7 +383,7 @@ class Job(db.Model):
db.session.commit() db.session.commit()
sleep(1) sleep(1)
db.session.refresh(self) db.session.refresh(self)
job_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], job_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id), str(self.user_id),
'jobs', 'jobs',
str(self.id)) str(self.id))
@ -397,7 +397,7 @@ class Job(db.Model):
if self.status != 'failed': if self.status != 'failed':
raise Exception('Could not restart job: status is not "failed"') raise Exception('Could not restart job: status is not "failed"')
job_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], job_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id), str(self.user_id),
'jobs', 'jobs',
str(self.id)) str(self.id))
@ -508,7 +508,7 @@ class CorpusFile(db.Model):
title = db.Column(db.String(255)) title = db.Column(db.String(255))
def delete(self): def delete(self):
corpus_file_path = os.path.join(current_app.config['NOPAQUE_STORAGE'], corpus_file_path = os.path.join(current_app.config['DATA_DIR'],
str(self.corpus.user_id), str(self.corpus.user_id),
'corpora', 'corpora',
str(self.corpus_id), str(self.corpus_id),
@ -570,7 +570,7 @@ class Corpus(db.Model):
'files': {file.id: file.to_dict() for file in self.files}} 'files': {file.id: file.to_dict() for file in self.files}}
def build(self): def build(self):
corpus_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], corpus_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id), str(self.user_id),
'corpora', 'corpora',
str(self.id)) str(self.id))
@ -606,7 +606,7 @@ class Corpus(db.Model):
self.status = 'submitted' self.status = 'submitted'
def delete(self): def delete(self):
corpus_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], corpus_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id), str(self.user_id),
'corpora', 'corpora',
str(self.id)) str(self.id))
@ -636,7 +636,7 @@ class QueryResult(db.Model):
title = db.Column(db.String(32)) title = db.Column(db.String(32))
def delete(self): def delete(self):
query_result_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], query_result_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id), str(self.user_id),
'query_results', 'query_results',
str(self.id)) str(self.id))

View File

@ -31,7 +31,7 @@ def add_query_result():
db.session.add(query_result) db.session.add(query_result)
db.session.commit() db.session.commit()
# create paths to save the uploaded json file # create paths to save the uploaded json file
query_result_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], query_result_dir = os.path.join(current_app.config['DATA_DIR'],
str(current_user.id), str(current_user.id),
'query_results', 'query_results',
str(query_result.id)) str(query_result.id))
@ -106,7 +106,7 @@ def inspect_query_result(query_result_id):
prefix='inspect-display-options-form' prefix='inspect-display-options-form'
) )
query_result_file_path = os.path.join( query_result_file_path = os.path.join(
current_app.config['NOPAQUE_STORAGE'], current_app.config['DATA_DIR'],
str(current_user.id), str(current_user.id),
'query_results', 'query_results',
str(query_result.id), str(query_result.id),
@ -141,7 +141,7 @@ def download_query_result(query_result_id):
if not (query_result.creator == current_user if not (query_result.creator == current_user
or current_user.is_administrator()): or current_user.is_administrator()):
abort(403) abort(403)
query_result_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], query_result_dir = os.path.join(current_app.config['DATA_DIR'],
str(current_user.id), str(current_user.id),
'query_results', 'query_results',
str(query_result.id)) str(query_result.id))

View File

@ -55,7 +55,7 @@ def service(service):
db.session.add(job) db.session.add(job)
db.session.commit() db.session.commit()
relative_dir = os.path.join(str(job.user_id), 'jobs', str(job.id)) relative_dir = os.path.join(str(job.user_id), 'jobs', str(job.id))
absolut_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'], absolut_dir = os.path.join(current_app.config['DATA_DIR'],
relative_dir) relative_dir)
try: try:
os.makedirs(absolut_dir) os.makedirs(absolut_dir)

View File

@ -1,35 +0,0 @@
{% extends "nopaque.html.j2" %}
{% block page_content %}
<div class="col s12">
<div class="card">
<form method="POST">
{{ feedback_form.hidden_tag() }}
<div class="card-content">
<p class="range-field">
{{ feedback_form.like_range.label }}
{{ feedback_form.like_range(class='validate', type='range', min=1, max=10) }}
</p>
<div class="input-field">
<i class="material-icons prefix">email</i>
{{ feedback_form.email(class='validate', type='email') }}
{{ feedback_form.email.label }}
{% for error in feedback_form.email.errors %}
<span class="helper-text red-text">{{ error }}</span>
{% endfor %}
</div>
<div class="input-field">
<i class="material-icons prefix">mode_edit</i>
{{ feedback_form.feedback(class='materialize-textarea', data_length=255) }}
{{ feedback_form.feedback.label }}
</div>
</div>
<div class="card-action right-align">
{{ M.render_field(feedback_form.submit, material_icon='send') }}
</div>
</form>
</div>
</div>
{% endblock %}

View File

@ -1,202 +0,0 @@
{% extends "nopaque.html.j2" %}
{% set parallax = True %}
{% block page_content %}
<style>
  {% if request.args.get('print') == 'True' %}
    html {
      /* DIN A0 at 150 dpi: 841x1189 mm -> 4967x7022 px.
         (Original had "4697" with no unit; height 7022px confirms the
         intended A0-at-150dpi computation, so width must be 4967px.) */
      width: 4967px;
      height: 7022px;
    }
    /* The page is authored at normal size; scale fixed chrome up 3x so it
       stays proportional on the poster-sized canvas. */
    div.navbar-fixed {
      transform: scale(3);
      transform-origin: 0 0;
    }
    footer.page-footer {
      transform: scale(3);
      transform-origin: 0 0;
      margin-top: 5496px;
    }
    .print-transform {
      transform: scale(3);
      transform-origin: 0 0;
    }
  {% endif %}
  .parallax-container {
    height: 321px;
  }
</style>
<div class="print-transform">
<div class="section">
<div class="row container">
<div class="col s12 m5">
<h1>nopaque</h1>
<p>From text to data to analysis</p>
<p class="light">Patrick Jentsch, Stephan Porada and Helene Schlicht</p>
</div>
<div class="col s12 m7">
<p>&nbsp;</p>
<div class="card">
<div class="card-content">
<div class="row">
<div class="col s3">
<p>&nbsp;</p>
<img class="responsive-img" src="https://www.uni-bielefeld.de/sfb1288/images/Logo_SFB1288_DE_300dpi.png">
</div>
<div class="col s9">
<p>nopaque is a web application that helps to convert heterogeneous textual source material into standard-compliant research data for subsequent analysis. nopaque is designed to accompany your research process.</p>
<p>The web application is developed within the DFG-funded Collaborative Research Center (SFB) 1288 "Practices of Comparison" by the subproject INF "Data Infrastructure and Digital Humanities".</p>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="parallax-container">
<img src="{{ url_for('static', filename='images/parallax_hq/books_antique_book_old.jpg') }}" width="100%" alt="" style="margin-top: -200px;">
</div>
<div class="section white scrollspy" id="information">
<div class="row container">
<div class="col s12">
<div class="row">
<div class="col s12">
<h3>Why you should use nopaque</h3>
<p>nopaque is a custom-built web application for researchers who want to get out more of their images and texts without having to bother about the technical side of things. You can focus on what really interests you, nopaque does the rest.</p>
<p>nopaque's utilization of container virtualization guarantees high interoperability, reusability and reproducibility of research results. All processing steps are carried out in containers created on demand, based on static images with fixed software versions including all dependencies.</p>
</div>
<div class="col s12">
<div class="row">
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">flash_on</i>
<p>Speeds up your work</p>
<p class="light">All tools provided by nopaque are carefully selected to provide a complete tool suite without being held up by compatibility issues.</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">cloud</i>
<p>Cloud infrastructure</p>
<p class="light">All computational work is processed within nopaques cloud infrastructure. You don't need to install any software. Great, right?</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">group</i>
<p>User friendly</p>
<p class="light">You can start right away without having to read mile-long manuals. All services come with default settings that make it easy for you to just get going. Also great, right?</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">settings</i>
<p>Meshing processes</p>
<p class="light">No matter where you step in, nopaque facilitates and accompanies your research. Its workflow perfectly ties in with your research process.</p>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="parallax-container">
<img src="{{ url_for('static', filename='images/parallax_hq/concept_document_focus_letter.jpg') }}" width="100%" alt="" style="margin-top: -350px;">
</div>
<div class="section white scrollspy" id="services">
<div class="row container">
<div class="col s12">
<div class="row">
<div class="col s12">
<h3>What nopaque can do for you</h3>
<p>All services and processes are logically linked and built upon each other. You can follow them step by step or directly choose the one that suits your needs best. And while the process is computed in nopaque's cloud infrastructure, you can just keep working.</p>
</div>
<div class="col s12">
<br class="hide-on-small-only">
<div class="row">
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">burst_mode</i>
<p>File setup</p>
<p class="light">Digital copies of text based research data (books, letters, etc.) often comprise various files and formats. nopaque converts and merges those files to facilitate further processing and the application of other services.</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">find_in_page</i>
<p>Optical Character Recognition</p>
<p class="light">nopaque converts your image data like photos or scans into text data through OCR making it machine readable. This step enables you to proceed with further computational analysis of your documents.</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">format_textdirection_l_to_r</i>
<p>Natural Language Processing</p>
<p class="light">By means of computational linguistic data processing (tokenization, lemmatization, part-of-speech tagging and named-entity recognition) nopaque extracts additional information from your text.</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">search</i>
<p>Corpus analysis</p>
<p class="light">nopaque lets you create and upload as many text corpora as you want. It makes use of CQP Query Language, which allows for complex search requests with the aid of metadata and NLP tags.</p>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="parallax-container">
<img src="{{ url_for('static', filename='images/parallax_hq/text_data_wide.png') }}" width="100%" alt="" style="margin-top: -450px;">
</div>
<div class="section white scrollspy" id="registration-and-log-in">
<div class="row container">
<div class="col s12">
<div class="row">
<!--
<div class="col s12 m4">
<h3>Registration and Log in</h3>
<p>Want to boost your research and get going? nopaque is free and no download is needed. Register now!</p>
<a class="btn waves-effect waves-light" href="{{ url_for('auth.register') }}"><i class="material-icons left">person_add</i>Register</a>
</div>-->
<div class="col s12">
<div class="card">
<form method="POST">
{{ login_form.hidden_tag() }}
<div class="card-content">
<span class="card-title">Registration and Log in</span>
<div class="input-field">
<i class="material-icons prefix">person</i>
{{ login_form.user(class='validate') }}
{{ login_form.user.label }}
{% for error in login_form.user.errors %}
<span class="helper-text red-text">{{ error }}</span>
{% endfor %}
</div>
<div class="input-field">
<i class="material-icons prefix">vpn_key</i>
{{ login_form.password(class='validate') }}
{{ login_form.password.label }}
{% for error in login_form.password.errors %}
<span class="helper-text red-text">{{ error }}</span>
{% endfor %}
</div>
<div class="row" style="margin-bottom: 0;">
<div class="col s6 left-align">
<a href="{{ url_for('auth.reset_password_request') }}">Forgot your password?</a>
|
<a href="{{ url_for('auth.reset_password_request') }}">No account yet?</a>
</div>
<div class="col s6 right-align">
{{ materialize.submit_button(login_form.submit) }}
</div>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
{% endblock %}

21
web/boot.sh Executable file
View File

@ -0,0 +1,21 @@
#!/bin/bash
# nopaque startup script: wait for backing services, then either run the
# full deploy + server (no arguments) or forward a flask management command.

# Block until the database and the message queue accept connections.
# --timeout=0 waits indefinitely, so container start order does not matter.
echo "Waiting for db..."
wait-for-it "${NOPAQUE_DB_HOST}:${NOPAQUE_DB_PORT:-5432}" --strict --timeout=0
echo "Waiting for mq..."
wait-for-it "${NOPAQUE_MQ_HOST}:${NOPAQUE_MQ_PORT}" --strict --timeout=0

# Run all Python commands below inside the project's virtual environment.
source venv/bin/activate

if [ "$#" -eq 0 ]; then
    # Default mode: run deployment tasks, then start the server.
    flask deploy
    python nopaque.py
elif [[ "$1" == "flask" ]]; then
    # Management mode: forward the whole command line to flask.
    # "$@" (quoted) preserves arguments containing whitespace; the original
    # unquoted ${@:1} was re-split and glob-expanded by the shell.
    exec "$@"
else
    echo "$0 [COMMAND]"
    echo ""
    echo "nopaque startup script"
    echo ""
    echo "Management Commands:"
    echo " flask"
fi

View File

@ -1,85 +1,97 @@
from werkzeug.middleware.proxy_fix import ProxyFix from werkzeug.middleware.proxy_fix import ProxyFix
import os
import logging import logging
import os
root_dir = os.path.abspath(os.path.dirname(__file__))
DEFAULT_DATA_DIR = os.path.join('/mnt/data')
DEFAULT_DB_PORT = '5432'
DEFAULT_DEBUG = 'False'
DEFAULT_LOG_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
DEFAULT_LOG_FILE = os.path.join(root_dir, 'nopaque.log')
DEFAULT_LOG_FORMAT = ('[%(asctime)s] %(levelname)s in %(pathname)s '
'(function: %(funcName)s, line: %(lineno)d): '
'%(message)s')
DEFAULT_LOG_LEVEL = 'ERROR'
DEFAULT_SMTP_USE_SSL = 'False'
DEFAULT_SMTP_USE_TLS = 'False'
DEFAULT_NUM_PROXIES = '0'
DEFAULT_PROTOCOL = 'http'
DEFAULT_RESSOURCES_PER_PAGE = '5'
DEFAULT_USERS_PER_PAGE = '10'
DEFAULT_SECRET_KEY = 'hard to guess string'
class Config: class Config:
''' ### Flask ### ''' ''' ### Database ### '''
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string' DB_HOST = os.environ.get('NOPAQUE_DB_HOST')
DB_NAME = os.environ.get('NOPAQUE_DB_NAME')
''' ### Flask-Mail ### ''' DB_PASSWORD = os.environ.get('NOPAQUE_DB_PASSWORD')
MAIL_SERVER = os.environ.get('MAIL_SERVER') DB_PORT = os.environ.get('NOPAQUE_DB_PORT', DEFAULT_DB_PORT)
MAIL_PORT = int(os.environ.get('MAIL_PORT')) DB_USERNAME = os.environ.get('NOPAQUE_DB_USERNAME')
MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS').lower() == 'true' SQLALCHEMY_DATABASE_URI = 'postgresql://{}:{}@{}:{}/{}'.format(
MAIL_USERNAME = os.environ.get('MAIL_USERNAME') DB_USERNAME, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME)
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
''' ### Flask-SQLAlchemy ### '''
SQLALCHEMY_DATABASE_URI = 'postgresql://{}:{}@db/{}'.format(
os.environ.get('POSTGRES_USER'),
os.environ.get('POSTGRES_PASSWORD'),
os.environ.get('POSTGRES_DB_NAME'))
SQLALCHEMY_RECORD_QUERIES = True SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_TRACK_MODIFICATIONS = False
''' ### nopaque ### ''' ''' ### Email ### '''
NOPAQUE_ADMIN = os.environ.get('NOPAQUE_ADMIN') MAIL_DEFAULT_SENDER = os.environ.get('NOPAQUE_SMTP_DEFAULT_SENDER')
NOPAQUE_CONTACT = os.environ.get('NOPAQUE_CONTACT') MAIL_PASSWORD = os.environ.get('NOPAQUE_SMTP_PASSWORD')
NOPAQUE_MAIL_SENDER = os.environ.get('NOPAQUE_MAIL_SENDER') MAIL_PORT = os.environ.get('NOPAQUE_SMTP_PORT')
NOPAQUE_MAIL_SUBJECT_PREFIX = '[nopaque]' MAIL_SERVER = os.environ.get('NOPAQUE_SMTP_SERVER')
NOPAQUE_PROTOCOL = os.environ.get('NOPAQUE_PROTOCOL') MAIL_USERNAME = os.environ.get('NOPAQUE_SMTP_USERNAME')
NOPAQUE_STORAGE = os.environ.get('NOPAQUE_STORAGE') MAIL_USE_SSL = os.environ.get('NOPAQUE_SMTP_USE_SSL',
DEFAULT_SMTP_USE_SSL).lower() == 'true'
MAIL_USE_TLS = os.environ.get('NOPAQUE_SMTP_USE_TLS',
DEFAULT_SMTP_USE_TLS).lower() == 'true'
os.makedirs('logs', exist_ok=True) ''' ### General ### '''
logging.basicConfig(filename='logs/nopaque.log', ADMIN_EMAIL_ADRESS = os.environ.get('NOPAQUE_ADMIN_EMAIL_ADRESS')
format='[%(asctime)s] %(levelname)s in ' CONTACT_EMAIL_ADRESS = os.environ.get('NOPAQUE_CONTACT_EMAIL_ADRESS')
'%(pathname)s:%(lineno)d - %(message)s', DATA_DIR = os.environ.get('NOPAQUE_DATA_DIR', DEFAULT_DATA_DIR)
datefmt='%Y-%m-%d %H:%M:%S', filemode='w') DEBUG = os.environ.get('NOPAQUE_DEBUG', DEFAULT_DEBUG).lower() == 'true'
NUM_PROXIES = int(os.environ.get('NOPAQUE_NUM_PROXIES',
''' ### Security enhancements ### ''' DEFAULT_NUM_PROXIES))
if NOPAQUE_PROTOCOL == 'https': PROTOCOL = os.environ.get('NOPAQUE_PROTOCOL', DEFAULT_PROTOCOL)
''' ### Flask ### ''' RESSOURCES_PER_PAGE = int(os.environ.get('NOPAQUE_RESSOURCES_PER_PAGE',
SESSION_COOKIE_SECURE = True DEFAULT_RESSOURCES_PER_PAGE))
SECRET_KEY = os.environ.get('NOPAQUE_SECRET_KEY', DEFAULT_SECRET_KEY)
''' ### Flask-Login ### ''' USERS_PER_PAGE = int(os.environ.get('NOPAQUE_USERS_PER_PAGE',
DEFAULT_USERS_PER_PAGE))
if PROTOCOL == 'https':
REMEMBER_COOKIE_HTTPONLY = True REMEMBER_COOKIE_HTTPONLY = True
REMEMBER_COOKIE_SECURE = True REMEMBER_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
@staticmethod ''' ### Logging ### '''
def init_app(app): LOG_DATE_FORMAT = os.environ.get('NOPAQUE_LOG_DATE_FORMAT',
proxy_fix_kwargs = {'x_for': 1, 'x_host': 1, 'x_port': 1, 'x_proto': 1} DEFAULT_LOG_DATE_FORMAT)
app.wsgi_app = ProxyFix(app.wsgi_app, **proxy_fix_kwargs) LOG_FILE = os.environ.get('NOPAQUE_LOG_FILE', DEFAULT_LOG_FILE)
LOG_FORMAT = os.environ.get('NOPAQUE_LOG_FORMAT', DEFAULT_LOG_FORMAT)
LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL', DEFAULT_LOG_LEVEL)
''' ### Message queue ### '''
MQ_HOST = os.environ.get('NOPAQUE_MQ_HOST')
MQ_PORT = os.environ.get('NOPAQUE_MQ_PORT')
MQ_TYPE = os.environ.get('NOPAQUE_MQ_TYPE')
SOCKETIO_MESSAGE_QUEUE_URI = \
'{}://{}:{}/'.format(MQ_TYPE, MQ_HOST, MQ_PORT)
class DevelopmentConfig(Config): def init_app(self, app):
''' ### Flask ### ''' # Configure logging according to the corresponding (LOG_*) config
DEBUG = True # entries
logging.basicConfig(datefmt=self.LOG_DATE_FORMAT,
''' ### nopaque ### ''' filename=self.LOG_FILE,
NOPAQUE_LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL') or 'DEBUG' format=self.LOG_FORMAT,
logging.basicConfig(level=NOPAQUE_LOG_LEVEL) level=self.LOG_LEVEL)
# Apply the ProxyFix middleware if nopaque is running behind reverse
# proxies. (NUM_PROXIES indicates the number of reverse proxies running
class TestingConfig(Config): # in front of nopaque)
''' ### Flask ### ''' if self.NUM_PROXIES > 0:
TESTING = True app.wsgi_app = ProxyFix(app.wsgi_app,
x_for=self.NUM_PROXIES,
''' ### Flask-SQLAlchemy ### ''' x_host=self.NUM_PROXIES,
SQLALCHEMY_DATABASE_URI = 'sqlite://' x_port=self.NUM_PROXIES,
x_proto=self.NUM_PROXIES)
''' ### Flask-WTF ### '''
WTF_CSRF_ENABLED = False
class ProductionConfig(Config):
''' ### nopaque ### '''
NOPAQUE_LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL') or 'ERROR'
logging.basicConfig(level=NOPAQUE_LOG_LEVEL)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig,
}

View File

@ -1,16 +0,0 @@
#!/bin/bash
# Legacy startup script: wait for the hard-coded db and redis services, then
# either run the deploy + server (no arguments) or forward a flask command.
# NOTE(review): host names and ports are hard-coded here rather than read
# from environment variables.

echo "Waiting for db..."
wait-for-it db:5432 --strict --timeout=0
echo "Waiting for redis..."
wait-for-it redis:6379 --strict --timeout=0

# Run all Python commands below inside the project's virtual environment.
source venv/bin/activate

if [ $# -eq 0 ]; then
    # Default mode: run deployment tasks, then start the server.
    flask deploy
    python nopaque.py
elif [ $1 == "flask" ]; then
    # Forward the remaining arguments to flask.
    # NOTE(review): $1 and ${@:2} are unquoted and will be word-split by the
    # shell; arguments containing whitespace are not passed through intact.
    flask ${@:2}
else
    echo "$0 [flask [options]]"
fi

View File

@ -5,9 +5,8 @@ from app.models import (Corpus, CorpusFile, Job, JobInput, JobResult,
NotificationData, NotificationEmailData, QueryResult, NotificationData, NotificationEmailData, QueryResult,
Role, User) Role, User)
from flask_migrate import Migrate, upgrade from flask_migrate import Migrate, upgrade
import os
app = create_app(os.getenv('FLASK_CONFIG') or 'default') app = create_app()
migrate = Migrate(app, db, compare_type=True) migrate = Migrate(app, db, compare_type=True)

View File

@ -17,6 +17,3 @@ class BasicsTestCase(unittest.TestCase):
def test_app_exists(self): def test_app_exists(self):
self.assertFalse(current_app is None) self.assertFalse(current_app is None)
def test_app_is_testing(self):
self.assertTrue(current_app.config['TESTING'])