Huge config update and SMTP fix for daemon

This commit is contained in:
Patrick Jentsch 2020-10-08 12:34:02 +02:00
parent 5e221d90ad
commit dff92cbf4d
43 changed files with 613 additions and 1204 deletions

189
.env.tpl
View File

@ -1,64 +1,145 @@
### Build ###
# Bash: getent group docker | cut -d: -f3
DOCKER_GID=
# Bash: id -g
GID=
# Bash: id -u
UID=
################################################################################
# Docker #
################################################################################
# DEFAULT: ./db
# NOTE: Use `.` as <project-root-dir>
# HOST_DB_DIR=
# Example: 999
# HINT: Use this bash command `getent group docker | cut -d: -f3`
HOST_DOCKER_GID=
# Example: 1000
# HINT: Use this bash command `id -g`
HOST_GID=
# DEFAULT: ./mq
# NOTE: Use `.` as <project-root-dir>
# HOST_MQ_DIR=
# DEFAULT: ./nopaqued.log
# NOTES: Use `.` as <project-root-dir>,
# This file must be present on container startup
# HOST_NOPAQUE_DAEMON_LOG_FILE=
# DEFAULT: ./nopaque.log
# NOTES: Use `.` as <project-root-dir>,
# This file must be present on container startup
# HOST_NOPAQUE_LOG_FILE=
# Example: 1000
# HINT: Use this bash command `id -u`
HOST_UID=
################################################################################
# Database (only PostgreSQL) #
################################################################################
NOPAQUE_DB_HOST=
### Runtime ###
# Fill out these variables to use the Docker HTTP socket. When doing this, you
# can remove the Docker UNIX socket mount from the docker-compose file.
# Example: /home/nopaqued/.docker
# DOCKER_CERT_PATH=
# Example: host.docker.internal
# DOCKER_HOST=
NOPAQUE_DB_NAME=
NOPAQUE_DB_PASSWORD=
# DEFAULT: 5432
# NOPAQUE_DB_PORT=
NOPAQUE_DB_USERNAME=
################################################################################
# SMTP #
################################################################################
# EXAMPLE: nopaque Admin <nopaque@example.com>
NOPAQUE_SMTP_DEFAULT_SENDER=
NOPAQUE_SMTP_PASSWORD=
# EXAMPLE: smtp.example.com
NOPAQUE_SMTP_SERVER=
# EXAMPLE: 587
NOPAQUE_SMTP_PORT=
# DEFAULT: False
# Choose one: False, True
# DOCKER_TLS_VERIFY=
# NOPAQUE_SMTP_USE_SSL=
# Choose one: development, production, testing
FLASK_CONFIG=
# Bash: python -c "import uuid; print(uuid.uuid4().hex)"
SECRET_KEY=
# Example: -
GITLAB_USERNAME=
# Example: -
GITLAB_PASSWORD=
# Example: smtp.example.com
MAIL_SERVER=
# Example: 587
MAIL_PORT=
# DEFAULT: False
# Choose one: False, True
MAIL_USE_TLS=
# Example: nopaque@example.com
MAIL_USERNAME=
# Example: -
MAIL_PASSWORD=
# NOPAQUE_SMTP_USE_TLS=
# Example: nopaque@example.com
NOPAQUE_ADMIN=
# Example: nopaque@example.com
NOPAQUE_CONTACT=
# Example: nopaque.localhost
NOPAQUE_DOMAIN=
# EXAMPLE: nopaque@example.com
NOPAQUE_SMTP_USERNAME=
################################################################################
# General #
################################################################################
# Example: admin.nopaque@example.com
NOPAQUE_ADMIN_EMAIL_ADRESS=
# Example: contact.nopaque@example.com
NOPAQUE_CONTACT_EMAIL_ADRESS=
# DEFAULT: /mnt/nopaque
# NOTE: This must be a network share and it must be available on all Docker Swarm nodes
# NOPAQUE_DATA_DIR=
# DEFAULT: False
# Choose one: False, True
NOPAQUE_EXECUTE_NOTIFICATIONS=
# Choose one: CRITICAL, ERROR, WARNING, INFO, DEBUG
NOPAQUE_LOG_LEVEL=
# Example: nopaque Admin <nopaque@example.com>
NOPAQUE_MAIL_SENDER=
# NOPAQUE_DEBUG=
# DEFAULT: localhost
# NOPAQUE_DOMAIN=
# DEFAULT: 0
# NOPAQUE_NUM_PROXIES=
# DEFAULT: http
# Choose one: http, https
NOPAQUE_PROTOCOL=
# Example: /mnt/nopaque
NOPAQUE_STORAGE=
# NOPAQUE_PROTOCOL=
# Example: nopaque
POSTGRES_DB_NAME=
# Example: -
POSTGRES_USER=
# Example: -
POSTGRES_PASSWORD=
# DEFAULT: 5
# NOPAQUE_RESSOURCES_PER_PAGE=
# DEFAULT: hard to guess string
# HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"`
NOPAQUE_SECRET_KEY=
# DEFAULT: 10
# NOPAQUE_USERS_PER_PAGE=
################################################################################
# Logging #
################################################################################
# DEFAULT: <nopaqued-root-dir>/nopaqued.log ~ /home/nopaqued/nopaqued.log
# NOTE: Use `.` as <nopaqued-root-dir>
# NOPAQUE_DAEMON_LOG_FILE=
# DEFAULT: %Y-%m-%d %H:%M:%S
# NOPAQUE_LOG_DATE_FORMAT=
# DEFAULT: <nopaque-root-dir>/NOPAQUE.log ~ /home/NOPAQUE/NOPAQUE.log
# NOTE: Use `.` as <nopaque-root-dir>
# NOPAQUE_LOG_FILE=
# DEFAULT: [%(asctime)s] %(levelname)s in %(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s
# NOPAQUE_LOG_FORMAT=
# DEFAULT: ERROR
# Choose one: CRITICAL, ERROR, WARNING, INFO, DEBUG
# NOPAQUE_LOG_LEVEL=
################################################################################
# Message queue #
################################################################################
NOPAQUE_MQ_HOST=
# EXAMPLE: 6379
NOPAQUE_MQ_PORT=
# Choose one of the supported types by Flask-SocketIO
NOPAQUE_MQ_TYPE=

37
.gitignore vendored
View File

@ -1,6 +1,37 @@
docker-compose.override.yml
nopaque.log
nopaqued.log
.DS_Store
*.env
*.py[cod]
# C extensions
*.so
# Docker related files
docker-compose.override.yml
db
mq
# Environment files
.env
# Installer logs
pip-log.txt
# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64
__pycache__
# Virtual environment
venv

View File

@ -1,3 +1,6 @@
# Docker related files
Dockerfile
.dockerignore
*.bak
# Packages
__pycache__

View File

@ -1,7 +1,7 @@
FROM python:3.6-slim-stretch
LABEL maintainer="inf_sfb1288@lists.uni-bielefeld.de"
LABEL authors="Patrick Jentsch <p.jentsch@uni-bielefeld.de>, Stephan Porada <sporada@uni-bielefeld.de>"
ARG DOCKER_GID
@ -15,7 +15,7 @@ RUN apt-get update \
build-essential \
libpq-dev \
wait-for-it \
&& rm -rf /var/lib/apt/lists/*
&& rm -r /var/lib/apt/lists/*
RUN groupadd --gid ${DOCKER_GID} --system docker \
@ -31,4 +31,4 @@ RUN python -m venv venv \
&& mkdir logs
ENTRYPOINT ["./docker-entrypoint.sh"]
ENTRYPOINT ["./boot.sh"]

9
daemon/boot.sh Executable file
View File

@ -0,0 +1,9 @@
#!/bin/bash
# Container entrypoint for the nopaque daemon: block until the required
# services are reachable, then start the daemon in the foreground.

# Wait for the PostgreSQL database (port falls back to 5432 when unset).
echo "Waiting for db..."
wait-for-it "${NOPAQUE_DB_HOST}:${NOPAQUE_DB_PORT:-5432}" --strict --timeout=0
# Wait for the nopaque web service to accept connections.
echo "Waiting for nopaque..."
wait-for-it nopaque:5000 --strict --timeout=0
# Activate the bundled virtual environment and run the daemon.
source venv/bin/activate
python nopaqued.py

61
daemon/config.py Normal file
View File

@ -0,0 +1,61 @@
import logging
import os


# Directory containing this file; used to anchor the default log path.
root_dir = os.path.abspath(os.path.dirname(__file__))

# Fallback values applied when the corresponding NOPAQUE_* environment
# variable is not set.
DEFAULT_DATA_DIR = os.path.join('/mnt/data')
DEFAULT_DB_PORT = '5432'
DEFAULT_DOMAIN = 'localhost'
DEFAULT_LOG_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
DEFAULT_LOG_FILE = os.path.join(root_dir, 'nopaqued.log')
DEFAULT_LOG_FORMAT = ('[%(asctime)s] %(levelname)s in %(pathname)s '
                      '(function: %(funcName)s, line: %(lineno)d): '
                      '%(message)s')
DEFAULT_LOG_LEVEL = 'ERROR'
DEFAULT_MAIL_USE_SSL = 'False'
DEFAULT_MAIL_USE_TLS = 'False'
DEFAULT_PROTOCOL = 'http'


class Config:
    """Daemon configuration assembled from NOPAQUE_* environment variables.

    All attributes are evaluated once, at class-definition (import) time,
    so the environment must be fully populated before this module is
    imported.
    """

    ''' ### Database ### '''
    DB_HOST = os.environ.get('NOPAQUE_DB_HOST')
    DB_NAME = os.environ.get('NOPAQUE_DB_NAME')
    DB_PASSWORD = os.environ.get('NOPAQUE_DB_PASSWORD')
    DB_PORT = os.environ.get('NOPAQUE_DB_PORT', DEFAULT_DB_PORT)
    DB_USERNAME = os.environ.get('NOPAQUE_DB_USERNAME')
    # Connection string consumed by SQLAlchemy's create_engine().
    SQLALCHEMY_DATABASE_URI = 'postgresql://{}:{}@{}:{}/{}'.format(
        DB_USERNAME, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME)

    ''' ### SMTP ### '''
    SMTP_DEFAULT_SENDER = os.environ.get('NOPAQUE_SMTP_DEFAULT_SENDER')
    SMTP_PASSWORD = os.environ.get('NOPAQUE_SMTP_PASSWORD')
    SMTP_PORT = os.environ.get('NOPAQUE_SMTP_PORT')
    SMTP_SERVER = os.environ.get('NOPAQUE_SMTP_SERVER')
    SMTP_USERNAME = os.environ.get('NOPAQUE_SMTP_USERNAME')
    # Boolean flags are parsed from the strings 'True'/'False'
    # (case-insensitive); anything else counts as False.
    SMTP_USE_SSL = os.environ.get('NOPAQUE_SMTP_USE_SSL',
                                  DEFAULT_MAIL_USE_SSL).lower() == 'true'
    SMTP_USE_TLS = os.environ.get('NOPAQUE_SMTP_USE_TLS',
                                  DEFAULT_MAIL_USE_TLS).lower() == 'true'

    ''' ### General ### '''
    DATA_DIR = os.environ.get('NOPAQUE_DATA_DIR', DEFAULT_DATA_DIR)
    DOMAIN = os.environ.get('NOPAQUE_DOMAIN', DEFAULT_DOMAIN)
    PROTOCOL = os.environ.get('NOPAQUE_PROTOCOL', DEFAULT_PROTOCOL)

    ''' ### Logging ### '''
    LOG_DATE_FORMAT = os.environ.get('NOPAQUE_LOG_DATE_FORMAT',
                                     DEFAULT_LOG_DATE_FORMAT)
    # NOTE: the daemon reads NOPAQUE_DAEMON_LOG_FILE (not NOPAQUE_LOG_FILE),
    # so daemon and web app can log to separate files.
    LOG_FILE = os.environ.get('NOPAQUE_DAEMON_LOG_FILE', DEFAULT_LOG_FILE)
    LOG_FORMAT = os.environ.get('NOPAQUE_LOG_FORMAT', DEFAULT_LOG_FORMAT)
    LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL', DEFAULT_LOG_LEVEL)

    def init_app(self):
        """Configure the root logger from the LOG_* settings above."""
        # Configure logging according to the corresponding (LOG_*) config
        # entries
        logging.basicConfig(datefmt=self.LOG_DATE_FORMAT,
                            filename=self.LOG_FILE,
                            format=self.LOG_FORMAT,
                            level=self.LOG_LEVEL)

View File

@ -1,9 +0,0 @@
#!/bin/bash
echo "Waiting for db..."
wait-for-it db:5432 --strict --timeout=0
echo "Waiting for web..."
wait-for-it web:5000 --strict --timeout=0
source venv/bin/activate
python nopaqued.py

View File

@ -1,30 +0,0 @@
import os
import logging
def init_logger():
    """Initialise logging and return the module logger.

    The log level is taken from NOPAQUE_LOG_LEVEL; when unset it is
    derived from FLASK_CONFIG ('development' -> DEBUG, 'production' ->
    ERROR, 'testing' -> unset/default).

    Returns:
        logging.Logger: the logger for this module.
    """
    os.makedirs('logs', exist_ok=True)
    # Resolve the desired level BEFORE configuring logging.
    log_level = os.environ.get('NOPAQUE_LOG_LEVEL')
    if log_level is None:
        flask_config = os.environ.get('FLASK_CONFIG')
        if flask_config == 'development':
            log_level = 'DEBUG'
        elif flask_config == 'testing':
            # TODO: Set an appropriate log level
            pass
        elif flask_config == 'production':
            log_level = 'ERROR'
    # BUG FIX: the original called logging.basicConfig() a second time just
    # to set the level. basicConfig() is a no-op once the root logger has a
    # handler, so the level was silently ignored and the root logger stayed
    # at the default (WARNING). Configure everything in a single call.
    kwargs = {}
    if log_level is not None:
        kwargs['level'] = log_level
    logging.basicConfig(filename='logs/nopaqued.log',
                        format='[%(asctime)s] %(levelname)s in '
                               '%(pathname)s:%(lineno)d - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S', filemode='w',
                        **kwargs)
    return logging.getLogger(__name__)


if __name__ == '__main__':
    init_logger()

View File

@ -2,26 +2,20 @@ from tasks.check_corpora import check_corpora
from tasks.check_jobs import check_jobs
from tasks.notify import notify
from time import sleep
import os
def nopaqued():
NOPAQUE_EXECUTE_NOTIFICATIONS = os.environ.get('NOPAQUE_EXECUTE_NOTIFICATIONS', 'True').lower() == 'true' # noqa
threads = {'check_corpora': None, 'check_jobs': None, 'notify': None}
check_corpora_thread = check_corpora()
check_jobs_thread = check_jobs()
notify_thread = notify()
threads['check_corpora'] = check_corpora()
threads['check_jobs'] = check_jobs()
threads['notify'] = notify(NOPAQUE_EXECUTE_NOTIFICATIONS)
while True:
if not threads['check_corpora'].is_alive():
threads['check_corpora'] = check_corpora()
if not threads['check_jobs'].is_alive():
threads['check_jobs'] = check_jobs()
if not threads['notify'].is_alive():
threads['notify'] = notify(NOPAQUE_EXECUTE_NOTIFICATIONS)
# If execute_notifications True mails are sent.
# If execute_notifications False no mails are sent.
# But notification status will be set nonetheless.
if not check_corpora_thread.is_alive():
check_corpora_thread = check_corpora()
if not check_jobs_thread.is_alive():
check_jobs_thread = check_jobs()
if not notify_thread.is_alive():
notify_thread = notify()
sleep(3)

View File

@ -1,455 +0,0 @@
from notify.notification import Notification
from notify.service import NotificationService
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import Session, relationship
from sqlalchemy.ext.automap import automap_base
from datetime import datetime
from time import sleep
import docker
import json
import logging
import os
import shutil
''' Global constants '''
NOPAQUE_STORAGE = os.environ.get('NOPAQUE_STORAGE')
''' Global variables '''
docker_client = None
session = None
# Classes for database models
# Declarative base for reflected (automapped) database models. The column
# definitions are filled in at runtime by Base.prepare(engine, reflect=True);
# only table names and relationships are declared here.
Base = automap_base()


class Corpus(Base):
    __tablename__ = 'corpora'
    files = relationship('CorpusFile', collection_class=set)


class CorpusFile(Base):
    __tablename__ = 'corpus_files'


class Job(Base):
    __tablename__ = 'jobs'
    inputs = relationship('JobInput', collection_class=set)
    results = relationship('JobResult', collection_class=set)
    notification_data = relationship('NotificationData', collection_class=list)
    notification_email_data = relationship('NotificationEmailData', collection_class=list)


class NotificationData(Base):
    __tablename__ = 'notification_data'
    job = relationship('Job', collection_class=set)


class NotificationEmailData(Base):
    __tablename__ = 'notification_email_data'
    job = relationship('Job', collection_class=set)


class JobInput(Base):
    # BUG FIX: was 'job_results' (copy-paste from JobResult below), which
    # mapped JobInput to the wrong table and collided with JobResult.
    # TODO(review): confirm the actual table name against the DB schema.
    __tablename__ = 'job_inputs'


class JobResult(Base):
    __tablename__ = 'job_results'


class User(Base):
    __tablename__ = 'users'
    jobs = relationship('Job', collection_class=set)
    corpora = relationship('Corpus', collection_class=set)
def check_corpora():
    """Poll all corpora and advance each one through its lifecycle.

    NOTE: the passes below are order-dependent. The filters are lazy
    generators over the same list, so a corpus promoted from 'submitted'
    to 'queued' in the first pass is seen again by the second pass within
    the same call.
    """
    corpora = session.query(Corpus).all()
    # 'submitted' -> create the build service ('queued' on success).
    for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora):
        __create_build_corpus_service(corpus)
    # 'queued'/'running' -> inspect the build service, update the status.
    for corpus in filter(lambda corpus: (corpus.status == 'queued'
                                         or corpus.status == 'running'),
                         corpora):
        __checkout_build_corpus_service(corpus)
    # 'start analysis' -> spin up a CQPserver container for the corpus.
    for corpus in filter(lambda corpus: corpus.status == 'start analysis',
                         corpora):
        __create_cqpserver_container(corpus)
    # 'stop analysis' -> tear the CQPserver container down again.
    for corpus in filter(lambda corpus: corpus.status == 'stop analysis',
                         corpora):
        __remove_cqpserver_container(corpus)
def __create_build_corpus_service(corpus):
    """Create the Docker Swarm service that builds *corpus*.

    Wipes any stale data/registry directories, removes a leftover service
    of the same name, then creates a fresh 'build-corpus_<id>' service.
    Sets corpus.status to 'queued' on success or 'failed' when service
    creation raises.
    """
    corpus_dir = os.path.join(NOPAQUE_STORAGE, str(corpus.user_id),
                              'corpora', str(corpus.id))
    corpus_data_dir = os.path.join(corpus_dir, 'data')
    corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt')
    corpus_registry_dir = os.path.join(corpus_dir, 'registry')
    # Start from clean data/registry directories so a rebuild never mixes
    # artifacts from a previous build attempt.
    if os.path.exists(corpus_data_dir):
        shutil.rmtree(corpus_data_dir)
    if os.path.exists(corpus_registry_dir):
        shutil.rmtree(corpus_registry_dir)
    os.mkdir(corpus_data_dir)
    os.mkdir(corpus_registry_dir)
    service_args = {'command': 'docker-entrypoint.sh build-corpus',
                    'constraints': ['node.role==worker'],
                    'labels': {'origin': 'nopaque',
                               'type': 'corpus.prepare',
                               'corpus_id': str(corpus.id)},
                    'mounts': [corpus_file + ':/root/files/corpus.vrt:ro',
                               corpus_data_dir + ':/corpora/data:rw',
                               corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
                    'name': 'build-corpus_{}'.format(corpus.id),
                    'restart_policy': docker.types.RestartPolicy()}
    service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest')
    # Remove a leftover service with the same name before creating a new
    # one; bail out silently on other Docker errors (retried next poll).
    try:
        service = docker_client.services.get(service_args['name'])
    except docker.errors.NotFound:
        pass
    except docker.errors.DockerException:
        return
    else:
        service.remove()
    try:
        docker_client.services.create(service_image, **service_args)
    except docker.errors.DockerException:
        corpus.status = 'failed'
    else:
        corpus.status = 'queued'
def __checkout_build_corpus_service(corpus):
    """Inspect the corpus build service and update corpus.status.

    'queued' -> 'running' once the first task leaves 'pending';
    'running' -> 'prepared' on completion or 'failed' on task failure
    (the service is removed in both terminal cases). A vanished service
    marks the corpus 'failed'; other Docker errors are ignored until the
    next poll.
    """
    service_name = 'build-corpus_{}'.format(corpus.id)
    try:
        service = docker_client.services.get(service_name)
    except docker.errors.NotFound:
        # FIX: corrected 'stauts' -> 'status' in the log message.
        logger.error('__checkout_build_corpus_service({}):'.format(corpus.id)
                     + ' The service does not exist.'
                     + ' (status: {} -> failed)'.format(corpus.status))
        corpus.status = 'failed'
        return
    except docker.errors.DockerException:
        return
    service_tasks = service.tasks()
    if not service_tasks:
        # No task scheduled yet; check again on the next poll.
        return
    task_state = service_tasks[0].get('Status').get('State')
    if corpus.status == 'queued' and task_state != 'pending':
        corpus.status = 'running'
    elif corpus.status == 'running' and task_state == 'complete':
        service.remove()
        corpus.status = 'prepared'
    elif corpus.status == 'running' and task_state == 'failed':
        service.remove()
        corpus.status = task_state
def __create_cqpserver_container(corpus):
    """Start a CQPserver container serving *corpus* for analysis.

    Removes any leftover container of the same name first. Sets
    corpus.status to 'analysing' on success; on Docker errors the corpus
    keeps its current status and is retried on the next poll.
    """
    corpus_dir = os.path.join(NOPAQUE_STORAGE, str(corpus.user_id),
                              'corpora', str(corpus.id))
    corpus_data_dir = os.path.join(corpus_dir, 'data')
    corpus_registry_dir = os.path.join(corpus_dir, 'registry')
    container_args = {'command': 'cqpserver',
                      'detach': True,
                      'volumes': [corpus_data_dir + ':/corpora/data:rw',
                                  corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
                      'name': 'cqpserver_{}'.format(corpus.id),
                      # BUG FIX: was 'opaque_default'; the compose project
                      # network is 'nopaque_default' (as used by the
                      # replacement implementation of this function).
                      'network': 'nopaque_default'}
    container_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest')
    try:
        container = docker_client.containers.get(container_args['name'])
    except docker.errors.NotFound:
        pass
    except docker.errors.DockerException:
        return
    else:
        container.remove(force=True)
    try:
        docker_client.containers.run(container_image, **container_args)
    except docker.errors.DockerException:
        return
    else:
        corpus.status = 'analysing'
def __remove_cqpserver_container(corpus):
    """Remove the corpus' CQPserver container and mark it 'prepared'."""
    container_name = 'cqpserver_{}'.format(corpus.id)
    try:
        container = docker_client.containers.get(container_name)
    except docker.errors.NotFound:
        pass
    except docker.errors.DockerException:
        return
    else:
        container.remove(force=True)
        # NOTE(review): the status reset appears to run only after a
        # successful removal, so a NotFound container leaves the corpus in
        # 'stop analysis' — confirm this is the intended behavior.
        corpus.status = 'prepared'
def check_jobs():
    """Poll all jobs and advance each one through its lifecycle.

    Mirrors check_corpora(): 'submitted' jobs get a Swarm service,
    'queued'/'running' jobs are checked out, 'canceling' jobs have their
    service removed. The notification hooks are currently disabled.
    """
    jobs = session.query(Job).all()
    for job in filter(lambda job: job.status == 'submitted', jobs):
        __create_job_service(job)
    for job in filter(lambda job: (job.status == 'queued'), jobs):
        __checkout_job_service(job)
        # __add_notification_data(job, 'queued')
    for job in filter(lambda job: (job.status == 'running'), jobs):
        __checkout_job_service(job)
        # __add_notification_data(job, 'running')
    # for job in filter(lambda job: job.status == 'complete', jobs):
    #     __add_notification_data(job, 'complete')
    # for job in filter(lambda job: job.status == 'failed', jobs):
    #     __add_notification_data(job, 'failed')
    for job in filter(lambda job: job.status == 'canceling', jobs):
        __remove_job_service(job)
def __add_notification_data(job, notified_on_status):
    """Record that *job* reached *notified_on_status* for email notification.

    Respects the user's mail-notification setting ('none' suppresses all
    notifications, 'end' only notifies on completed jobs). Creates the
    job's NotificationData row on first use, and appends a
    NotificationEmailData row whenever the status differs from the last
    notified one.
    """
    # checks if user wants any notifications at all
    if (job.user.setting_job_status_mail_notifications == 'none'):
        # logger.warning('User does not want any notifications!')
        return
    # checks if user wants only notification on completed jobs
    elif (job.user.setting_job_status_mail_notifications == 'end'
          and notified_on_status != 'complete'):
        # logger.warning('User only wants notifications on job completed!')
        return
    else:
        # check if a job already has associated NotificationData
        notification_exists = len(job.notification_data)
        # create notification_data for current job if there is none
        if (notification_exists == 0):
            notification_data = NotificationData(job_id=job.id)
            session.add(notification_data)
            session.commit()  # If no commit job will have no NotificationData
            # logger.warning('Created NotificationData for current Job.'))
        else:
            pass
            # logger.warning('Job already had notification: {}'.format(notification_exists))
        # Only record a new email notification when the status actually
        # changed since the last notification.
        if (job.notification_data[0].notified_on != notified_on_status):
            notification_email_data = NotificationEmailData(job_id=job.id)
            notification_email_data.notify_status = notified_on_status
            notification_email_data.creation_date = datetime.utcnow()
            job.notification_data[0].notified_on = notified_on_status
            session.add(notification_email_data)
            # logger.warning('Created NotificationEmailData for current Job.')
        else:
            # logger.warning('NotificationEmailData has already been created for current Job!')
            pass
def __create_job_service(job):
    """Create the Docker Swarm service that runs *job*.

    Builds the service command from the job's service type, input file
    and stored arguments, removes any leftover service of the same name,
    then creates the service with the job's CPU/memory reservations.
    Sets job.status to 'queued' on success or 'failed' when creation
    raises.
    """
    job_dir = os.path.join(NOPAQUE_STORAGE, str(job.user_id), 'jobs',
                           str(job.id))
    # 'file-setup' jobs additionally receive the uploaded filename as a
    # positional argument; all other services only get the directories.
    service_args = {'command': ('{} /files /files/output'.format(job.service)
                                + ' {}'.format(job.secure_filename if job.service == 'file-setup' else '')
                                + ' --log-dir /files'
                                + ' --zip [{}]_{}'.format(job.service, job.secure_filename)
                                + ' ' + ' '.join(json.loads(job.service_args))),
                    'constraints': ['node.role==worker'],
                    'labels': {'origin': 'nopaque',
                               'type': 'service.{}'.format(job.service),
                               'job_id': str(job.id)},
                    'mounts': [job_dir + ':/files:rw'],
                    'name': 'job_{}'.format(job.id),
                    # Reservations: n_cores in nano-CPUs, mem_mb in bytes.
                    'resources': docker.types.Resources(
                        cpu_reservation=job.n_cores * (10 ** 9),
                        mem_reservation=job.mem_mb * (10 ** 6)),
                    'restart_policy': docker.types.RestartPolicy()}
    service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/'
                     + job.service + ':' + job.service_version)
    # Replace any stale service left over from a previous attempt.
    try:
        service = docker_client.services.get(service_args['name'])
    except docker.errors.NotFound:
        pass
    except docker.errors.DockerException:
        return
    else:
        service.remove()
    try:
        docker_client.services.create(service_image, **service_args)
    except docker.errors.DockerException:
        job.status = 'failed'
    else:
        job.status = 'queued'
def __checkout_job_service(job):
    """Inspect the job's Swarm service and update status and results.

    'queued' -> 'running' once the first task leaves 'pending'. A
    'complete' or 'failed' task removes the service, stamps the end date
    and, on completion, registers every *.zip in the output directory as
    a JobResult. A vanished service marks the job 'failed'; other Docker
    errors are ignored until the next poll.
    """
    service_name = 'job_{}'.format(job.id)
    try:
        service = docker_client.services.get(service_name)
    except docker.errors.NotFound:
        # FIX: corrected 'stauts' -> 'status' in the log message.
        logger.error('__checkout_job_service({}):'.format(job.id)
                     + ' The service does not exist.'
                     + ' (status: {} -> failed)'.format(job.status))
        job.status = 'failed'
        return
    except docker.errors.DockerException:
        return
    service_tasks = service.tasks()
    if not service_tasks:
        # No task scheduled yet; check again on the next poll.
        return
    task_state = service_tasks[0].get('Status').get('State')
    if job.status == 'queued' and task_state != 'pending':
        job.status = 'running'
    elif (job.status == 'running'
          and (task_state == 'complete' or task_state == 'failed')):
        service.remove()
        job.end_date = datetime.utcnow()
        job.status = task_state
        if task_state == 'complete':
            results_dir = os.path.join(NOPAQUE_STORAGE, str(job.user_id),
                                       'jobs', str(job.id), 'output')
            results = filter(lambda x: x.endswith('.zip'),
                             os.listdir(results_dir))
            for result in results:
                job_result = JobResult(dir=results_dir, filename=result,
                                       job_id=job.id)
                session.add(job_result)
def __remove_job_service(job):
    """Tear down the Swarm service of a canceling job.

    A missing service means the work is already gone, so the job is
    marked 'canceled'. Any other Docker error aborts silently and the
    next poll retries. On success, mounts are detached before removal.
    """
    name = 'job_{}'.format(job.id)
    try:
        svc = docker_client.services.get(name)
    except docker.errors.NotFound:
        # Nothing left to remove - consider the job canceled.
        job.status = 'canceled'
        return
    except docker.errors.DockerException:
        return
    # Detach the mounts first so the job directory is released cleanly.
    svc.update(mounts=None)
    svc.remove()
def handle_jobs():
    # Thin alias kept for the main loop's naming scheme.
    check_jobs()


def handle_corpora():
    # Thin alias kept for the main loop's naming scheme.
    check_corpora()
# Email notification functions
def create_mail_notifications(notification_service):
    """Build one Notification per job that has pending email data.

    Consumes NotificationEmailData rows in creation order and deletes
    them from the session. Returns a dict keyed by job id.
    """
    notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all()
    notifications = {}
    for data in notification_email_data:
        notification = Notification()
        notification.set_addresses(notification_service.email_address,
                                   data.job.user.email)
        subject_template = '[nopaque] Status update for your Job/Corpora: {title}!'
        subject_template_values_dict = {'title': data.job.title}
        protocol = os.environ.get('NOPAQUE_PROTOCOL')
        domain = os.environ.get('NOPAQUE_DOMAIN')
        url = '{protocol}://{domain}/{jobs}/{id}'.format(
            protocol=protocol, domain=domain, jobs='jobs', id=data.job.id)
        body_template_values_dict = {'username': data.job.user.username,
                                     'id': data.job.id,
                                     'title': data.job.title,
                                     'status': data.notify_status,
                                     'time': data.creation_date,
                                     'url': url}
        notification.set_notification_content(subject_template,
                                              subject_template_values_dict,
                                              'templates/notification_messages/notification.txt',
                                              'templates/notification_messages/notification.html',
                                              body_template_values_dict)
        notifications[data.job.id] = notification
        # Using a dictionary for notifications avoids sending multiple mails
        # if the status of a job changes in a few seconds. The user will not
        # get swamped with mails for queued, running and complete if those
        # happen within a few seconds. Only the last update will be sent.
        session.delete(data)
    return notifications
def send_mail_notifications(notifications, notification_service):
    """Send each prepared notification via the notification service.

    Failed sends are stashed in notification_service.not_sent for a retry
    on a later cycle, and mail_limit_exceeded is flipped accordingly.
    """
    for key, notification in notifications.items():
        try:
            notification_service.send(notification)
            notification_service.mail_limit_exceeded = False
        except Exception as e:
            # Adds notifications to unsent if mail server exceeded limit
            # for consecutive mail sending
            notification_service.not_sent[key] = notification
            notification_service.mail_limit_exceeded = True
def notify():
    """Run one notification cycle: collect pending updates and mail them.

    The notification payloads are created first; the SMTP login only
    happens when there is actually something to send. Notifications that
    could not be sent are kept in notification_service.not_sent and
    merged into the next attempt.
    """
    # Initialize notification service
    notification_service = NotificationService()
    notification_service.get_smtp_configs()
    notification_service.set_server()
    # create notifications (content, recipient etc.)
    notifications = create_mail_notifications(notification_service)
    # only login and send mails if there are any notifications
    if (len(notifications) > 0):
        try:
            notification_service.login()
            # combine new and unsent notifications
            notifications.update(notification_service.not_sent)
            # send all notifications
            send_mail_notifications(notifications, notification_service)
            # remove unsent notifications because they have been sent now
            # but only if mail limit has not been exceeded
            if (notification_service.mail_limit_exceeded is not True):
                notification_service.not_sent = {}
            notification_service.quit()
        except Exception as e:
            # Best effort: keep everything for the next cycle.
            notification_service.not_sent.update(notifications)
# Logger functions #
def init_logger():
    '''
    Initialise the module-level `logger` used by the checkout functions.

    Writes to logs/nopaqued.log (truncated on each start). The level is
    derived from FLASK_CONFIG: DEBUG for development, WARNING for
    production.
    '''
    global logger
    # Ensure the log file exists before basicConfig opens it.
    if not os.path.isfile('logs/nopaqued.log'):
        file_path = os.path.join(os.getcwd(), 'logs/nopaqued.log')
        log = open(file_path, 'w+')
        log.close()
    logging.basicConfig(datefmt='%Y-%m-%d %H:%M:%S',
                        filemode='w', filename='logs/nopaqued.log',
                        format='%(asctime)s - %(levelname)s - %(name)s - '
                               '%(filename)s - %(lineno)d - %(message)s')
    logger = logging.getLogger(__name__)
    if os.environ.get('FLASK_CONFIG') == 'development':
        logger.setLevel(logging.DEBUG)
    if os.environ.get('FLASK_CONFIG') == 'production':
        logger.setLevel(logging.WARNING)
def nopaqued():
    """Daemon entry point: connect to Postgres and Docker, then loop.

    Reflects the database schema into the automapped models, logs the
    Docker client into the GitLab registry, and polls jobs and corpora
    every three seconds. NOTE: the database host 'db' is hard-coded in
    the connection string.
    """
    global Base
    global docker_client
    global session
    engine = create_engine(
        'postgresql://{}:{}@db/{}'.format(
            os.environ.get('POSTGRES_USER'),
            os.environ.get('POSTGRES_PASSWORD'),
            os.environ.get('POSTGRES_DB_NAME')))
    # Fill in the model columns from the live database schema.
    Base.prepare(engine, reflect=True)
    session = Session(engine)
    session.commit()
    docker_client = docker.from_env()
    docker_client.login(password=os.environ.get('GITLAB_PASSWORD'),
                        registry="gitlab.ub.uni-bielefeld.de:4567",
                        username=os.environ.get('GITLAB_USERNAME'))
    # executing background functions
    while True:
        handle_jobs()
        handle_corpora()
        # notify()  (email notifications are currently disabled)
        session.commit()
        sleep(3)


if __name__ == '__main__':
    init_logger()
    nopaqued()

View File

@ -11,16 +11,17 @@ class Notification(EmailMessage):
body_html_template_path,
body_template_values_dict):
# Create subject with subject_template_values_dict
self['subject'] = subject_template.format(**subject_template_values_dict)
self['subject'] = subject_template.format(
**subject_template_values_dict)
# Open template files and insert values from body_template_values_dict
with open(body_txt_template_path) as nfile:
self.body_txt = nfile.read().format(**body_template_values_dict)
self.body = nfile.read().format(**body_template_values_dict)
with open(body_html_template_path) as nfile:
self.body_html = nfile.read().format(**body_template_values_dict)
self.html = nfile.read().format(**body_template_values_dict)
# Set txt of email
self.set_content(self.body_txt)
self.set_content(self.body)
# Set html alternative
self.add_alternative(self.body_html, subtype='html')
self.add_alternative(self.html, subtype='html')
def set_addresses(self, sender, recipient):
self['From'] = sender

View File

@ -1,41 +1,16 @@
import os
import smtplib
class NotificationService(object):
class NotificationService:
"""This is a nopaque notifcation service object."""
def __init__(self, execute_flag):
super(NotificationService, self).__init__()
self.execute_flag = execute_flag  # If True mails are sent normally
# If False mails are not sent. Used to avoid sending mails for jobs
# that have been completed a long time ago. Use this if you implement
# notify into an already existing nopaque instance. Change it to True
# after the daemon has run one time with the flag set to False
self.not_sent = {} # Holds due to an error unsent email notifications
self.mail_limit_exceeded = False # Bool to show if the mail server
# stopped sending mails due to exceeding its sending limit
def get_smtp_configs(self):
self.password = os.environ.get('MAIL_PASSWORD')
self.port = os.environ.get('MAIL_PORT')
self.server_str = os.environ.get('MAIL_SERVER')
self.tls = os.environ.get('MAIL_USE_TLS')
self.username = os.environ.get('MAIL_USERNAME').split("@")[0]
self.email_address = os.environ.get('MAIL_USERNAME')
def set_server(self):
self.smtp_server = smtplib.SMTP(host=self.server_str, port=self.port)
def login(self):
self.smtp_server.starttls()
self.smtp_server.login(self.username, self.password)
def __init__(self, smtp):
# Bool to show if the mail server stopped sending mails due to exceeding
# its sending limit
self.mail_limit_exceeded = False
# Holds due to an error unsent email notifications
self.not_sent = {}
self.smtp = smtp
def send(self, email):
if self.execute_flag:
self.smtp_server.send_message(email)
else:
return
self.smtp.send_message(email)
def quit(self):
self.smtp_server.quit()
self.smtp.quit()

View File

@ -1,6 +1,6 @@
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import relationship
from tasks import engine
from . import engine
Base = automap_base()

View File

@ -1,22 +1,11 @@
from config import Config
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
import os
import docker
''' Global constants '''
NOPAQUE_STORAGE = os.environ.get('NOPAQUE_STORAGE')
''' Docker client '''
config = Config()
config.init_app()
docker_client = docker.from_env()
docker_client.login(password=os.environ.get('GITLAB_PASSWORD'),
registry="gitlab.ub.uni-bielefeld.de:4567",
username=os.environ.get('GITLAB_USERNAME'))
''' Scoped session '''
engine = create_engine(
'postgresql://{}:{}@db/{}'.format(
os.environ.get('POSTGRES_USER'),
os.environ.get('POSTGRES_PASSWORD'),
os.environ.get('POSTGRES_DB_NAME')))
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI)
Session = scoped_session(sessionmaker(bind=engine))

View File

@ -1,16 +1,16 @@
from logger.logger import init_logger
from tasks import Session, docker_client, NOPAQUE_STORAGE
from tasks.decorators import background
from tasks.Models import Corpus
from . import config, docker_client, Session
from .decorators import background
from .models import Corpus
import docker
import logging
import os
import shutil
@background
def check_corpora():
c_session = Session()
corpora = c_session.query(Corpus).all()
session = Session()
corpora = session.query(Corpus).all()
for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora):
__create_build_corpus_service(corpus)
for corpus in filter(lambda corpus: (corpus.status == 'queued'
@ -23,13 +23,15 @@ def check_corpora():
for corpus in filter(lambda corpus: corpus.status == 'stop analysis',
corpora):
__remove_cqpserver_container(corpus)
c_session.commit()
session.commit()
Session.remove()
def __create_build_corpus_service(corpus):
corpus_dir = os.path.join(NOPAQUE_STORAGE, str(corpus.user_id),
'corpora', str(corpus.id))
corpus_dir = os.path.join(config.DATA_DIR,
str(corpus.user_id),
'corpora',
str(corpus.id))
corpus_data_dir = os.path.join(corpus_dir, 'data')
corpus_file = os.path.join(corpus_dir, 'merged', 'corpus.vrt')
corpus_registry_dir = os.path.join(corpus_dir, 'registry')
@ -49,7 +51,8 @@ def __create_build_corpus_service(corpus):
corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
'name': 'build-corpus_{}'.format(corpus.id),
'restart_policy': docker.types.RestartPolicy()}
service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest')
service_image = \
'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
try:
service = docker_client.services.get(service_args['name'])
except docker.errors.NotFound:
@ -67,12 +70,11 @@ def __create_build_corpus_service(corpus):
def __checkout_build_corpus_service(corpus):
logger = init_logger()
service_name = 'build-corpus_{}'.format(corpus.id)
try:
service = docker_client.services.get(service_name)
except docker.errors.NotFound:
logger.error('__checkout_build_corpus_service({}):'.format(corpus.id)
logging.error('__checkout_build_corpus_service({}):'.format(corpus.id)
+ ' The service does not exist.'
+ ' (stauts: {} -> failed)'.format(corpus.status))
corpus.status = 'failed'
@ -94,8 +96,10 @@ def __checkout_build_corpus_service(corpus):
def __create_cqpserver_container(corpus):
corpus_dir = os.path.join(NOPAQUE_STORAGE, str(corpus.user_id),
'corpora', str(corpus.id))
corpus_dir = os.path.join(config.DATA_DIR,
str(corpus.user_id),
'corpora',
str(corpus.id))
corpus_data_dir = os.path.join(corpus_dir, 'data')
corpus_registry_dir = os.path.join(corpus_dir, 'registry')
container_args = {'command': 'cqpserver',
@ -104,7 +108,8 @@ def __create_cqpserver_container(corpus):
corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
'name': 'cqpserver_{}'.format(corpus.id),
'network': 'nopaque_default'}
container_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest')
container_image = \
'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
try:
container = docker_client.containers.get(container_args['name'])
except docker.errors.NotFound:

View File

@ -1,46 +1,42 @@
from datetime import datetime
from logger.logger import init_logger
from tasks import Session, docker_client, NOPAQUE_STORAGE
from tasks.decorators import background
from tasks.Models import Job, NotificationData, NotificationEmailData, JobResult
from . import config, docker_client, Session
from .decorators import background
from .models import Job, JobResult, NotificationData, NotificationEmailData
import docker
import logging
import json
import os
@background
def check_jobs():
# logger = init_logger()
cj_session = Session()
jobs = cj_session.query(Job).all()
session = Session()
jobs = session.query(Job).all()
for job in filter(lambda job: job.status == 'submitted', jobs):
__create_job_service(job)
for job in filter(lambda job: (job.status == 'queued'), jobs):
__checkout_job_service(job, cj_session)
__add_notification_data(job, 'queued', cj_session)
for job in filter(lambda job: (job.status == 'running'), jobs):
__checkout_job_service(job, cj_session)
__add_notification_data(job, 'running', cj_session)
for job in filter(lambda job: job.status == 'queued', jobs):
__checkout_job_service(job, session)
__add_notification_data(job, 'queued', session)
for job in filter(lambda job: job.status == 'running', jobs):
__checkout_job_service(job, session)
__add_notification_data(job, 'running', session)
for job in filter(lambda job: job.status == 'complete', jobs):
__add_notification_data(job, 'complete', cj_session)
__add_notification_data(job, 'complete', session)
for job in filter(lambda job: job.status == 'failed', jobs):
__add_notification_data(job, 'failed', cj_session)
__add_notification_data(job, 'failed', session)
for job in filter(lambda job: job.status == 'canceling', jobs):
__remove_job_service(job)
cj_session.commit()
session.commit()
Session.remove()
def __add_notification_data(job, notified_on_status, scoped_session):
logger = init_logger()
def __add_notification_data(job, notified_on_status, session):
# checks if user wants any notifications at all
if (job.user.setting_job_status_mail_notifications == 'none'):
# logger.warning('User does not want any notifications!')
return
# checks if user wants only notification on completed jobs
elif (job.user.setting_job_status_mail_notifications == 'end'
and notified_on_status != 'complete'):
# logger.warning('User only wants notifications on job completed!')
return
else:
# check if a job already has associated NotificationData
@ -48,27 +44,21 @@ def __add_notification_data(job, notified_on_status, scoped_session):
# create notification_data for current job if there is none
if (notification_exists == 0):
notification_data = NotificationData(job_id=job.id)
scoped_session.add(notification_data)
scoped_session.commit()
session.add(notification_data)
# If no commit job will have no NotificationData
# logger.warning('Created NotificationData for current Job.'))
else:
pass
# logger.warning('Job already had notification: {}'.format(notification_exists))
session.commit()
if (job.notification_data[0].notified_on != notified_on_status):
notification_email_data = NotificationEmailData(job_id=job.id)
notification_email_data.notify_status = notified_on_status
notification_email_data.creation_date = datetime.utcnow()
job.notification_data[0].notified_on = notified_on_status
scoped_session.add(notification_email_data)
logger.warning('Created NotificationEmailData for current Job.')
else:
# logger.warning('NotificationEmailData has already been created for current Job!')
pass
session.add(notification_email_data)
def __create_job_service(job):
job_dir = os.path.join(NOPAQUE_STORAGE, str(job.user_id), 'jobs',
job_dir = os.path.join(config.DATA_DIR,
str(job.user_id),
'jobs',
str(job.id))
cmd = '{} -i /files -o /files/output'.format(job.service)
if job.service == 'file-setup':
@ -105,15 +95,14 @@ def __create_job_service(job):
job.status = 'queued'
def __checkout_job_service(job, scoped_session):
logger = init_logger()
def __checkout_job_service(job, session):
service_name = 'job_{}'.format(job.id)
try:
service = docker_client.services.get(service_name)
except docker.errors.NotFound:
logger.error('__checkout_job_service({}):'.format(job.id)
logging.error('__checkout_job_service({}): '.format(job.id)
+ 'The service does not exist. '
+ ' (stauts: {} -> failed)'.format(job.status))
+ '(status: {} -> failed)'.format(job.status))
job.status = 'failed'
return
except docker.errors.DockerException:
@ -130,14 +119,18 @@ def __checkout_job_service(job, scoped_session):
job.end_date = datetime.utcnow()
job.status = task_state
if task_state == 'complete':
results_dir = os.path.join(NOPAQUE_STORAGE, str(job.user_id),
'jobs', str(job.id), 'output')
results_dir = os.path.join(config.DATA_DIR,
str(job.user_id),
'jobs',
str(job.id),
'output')
results = filter(lambda x: x.endswith('.zip'),
os.listdir(results_dir))
for result in results:
job_result = JobResult(dir=results_dir, filename=result,
job_result = JobResult(dir=results_dir,
filename=result,
job_id=job.id)
scoped_session.add(job_result)
session.add(job_result)
def __remove_job_service(job):

View File

@ -1,29 +1,44 @@
from notify.notification import Notification
from notify.service import NotificationService
from sqlalchemy import asc
from tasks import Session
from tasks.decorators import background
from tasks.Models import NotificationEmailData
import os
from . import config, Session
from .decorators import background
from .models import NotificationEmailData
import logging
import smtplib
@background
def notify(execute_flag):
    # If True mails are sent normally
# If False mails are not sent. Used to avoid sending mails for jobs that
# have been completed a long time ago. Use this if you implement notify
# into an already existing nopaque instance. Change it to True after the
# daemon has run one time with the flag set to False.
# Initialize notification service
notification_service = NotificationService(execute_flag)
notification_service.get_smtp_configs()
notification_service.set_server()
def notify():
session = Session()
if config.SMTP_USE_SSL:
smtp = smtplib.SMTP_SSL(host=config.SMTP_SERVER, port=config.SMTP_PORT)
else:
smtp = smtplib.SMTP(host=config.SMTP_SERVER, port=config.SMTP_PORT)
if config.SMTP_USE_TLS:
smtp.starttls()
try:
smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
except smtplib.SMTPHeloError:
logging.warning('The server didnt reply properly to the HELO '
'greeting.')
return
except smtplib.SMTPAuthenticationError as e:
logging.warning('The server didnt accept the username/password '
'combination.')
logging.warning(e)
return
except smtplib.SMTPNotSupportedError:
logging.warning('The AUTH command is not supported by the server.')
return
except smtplib.SMTPException:
logging.warning('No suitable authentication method was found.')
return
notification_service = NotificationService(smtp)
# create notifications (content, recipient etc.)
notifications = __create_mail_notifications(notification_service)
notifications = __create_mail_notifications(notification_service, session)
# only login and send mails if there are any notifications
if (len(notifications) > 0):
try:
notification_service.login()
# combine new and unsent notifications
notifications.update(notification_service.not_sent)
# send all notifications
@ -32,27 +47,25 @@ def notify(execute_flag):
# but only if mail limit has not been exceeded
if (notification_service.mail_limit_exceeded is not True):
notification_service.not_sent = {}
notification_service.quit()
except Exception as e:
notification_service.not_sent.update(notifications)
notification_service.quit()
smtp.quit()
Session.remove()
# Email notification functions
def __create_mail_notifications(notification_service):
mn_session = Session()
notification_email_data = mn_session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all()
def __create_mail_notifications(notification_service, session):
notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all() # noqa
notifications = {}
for data in notification_email_data:
notification = Notification()
notification.set_addresses(notification_service.email_address,
notification.set_addresses(config.SMTP_DEFAULT_SENDER,
data.job.user.email)
subject_template = '[nopaque] Status update for your Job/Corpora: {title}!'
subject_template = ('[nopaque] Status update for your Job/Corpora: '
'{title}!')
subject_template_values_dict = {'title': data.job.title}
protocol = os.environ.get('NOPAQUE_PROTOCOL')
domain = os.environ.get('NOPAQUE_DOMAIN')
url = '{protocol}://{domain}/{jobs}/{id}'.format(
protocol=protocol, domain=domain, jobs='jobs', id=data.job.id)
url = '{}://{}/{}/{}'.format(config.PROTOCOL,
config.DOMAIN,
'jobs',
data.job.id)
body_template_values_dict = {'username': data.job.user.username,
'id': data.job.id,
'title': data.job.title,
@ -72,9 +85,8 @@ def __create_mail_notifications(notification_service):
# get swamped with mails for queued, running and complete if those
        # happen in a few seconds. Only the last update will be sent.
# This depends on the sleep time interval though.
mn_session.delete(data)
mn_session.commit()
Session.remove()
session.delete(data)
session.commit()
return notifications
@ -83,8 +95,10 @@ def __send_mail_notifications(notifications, notification_service):
try:
notification_service.send(notification)
notification_service.mail_limit_exceeded = False
except Exception as e:
except Exception:
            # Adds notifications to unsent if mail server exceeded limit for
            # consecutive mail sending
logging.warning('limit')
notification_service.not_sent[key] = notification
notification_service.mail_limit_exceeded = True
notification_service.not_sent.update(notifications)

View File

@ -0,0 +1,25 @@
version: "3.5"
services:
nopaque:
ports:
- "5000:5000"
volumes:
# Mount code as volumes
- "./web/app:/home/nopaque/app"
- "./web/boot.sh:/home/nopaque/boot.sh"
- "./web/config.py:/home/nopaque/config.py"
- "./web/migrations:/home/nopaque/migrations"
- "./web/nopaque.py:/home/nopaque/nopaque.py"
- "./web/requirements.txt:/home/nopaque/requirements.txt"
- "./web/tests:/home/nopaque/tests"
nopaqued:
volumes:
# Mount code as volumes
- "./daemon/boot.sh:/home/nopaqued/boot.sh"
- "./daemon/config.py:/home/nopaqued/config.py"
- "./daemon/logger:/home/nopaqued/logger"
- "./daemon/nopaqued.py:/home/nopaqued/nopaqued.py"
- "./daemon/notify:/home/nopaqued/notify"
- "./daemon/requirements.txt:/home/nopaqued/requirements.txt"
- "./daemon/tasks:/home/nopaqued/tasks"

View File

@ -1,51 +0,0 @@
version: "3.5"
networks:
reverse-proxy:
external:
name: reverse-proxy
services:
web:
labels:
- "traefik.docker.network=reverse-proxy"
- "traefik.enable=true"
### <http> ###
- "traefik.http.middlewares.nopaque-header.headers.customrequestheaders.X-Forwarded-Proto=http"
- "traefik.http.routers.nopaque.entrypoints=web"
- "traefik.http.routers.nopaque.middlewares=nopaque-header, redirect-to-https@file"
- "traefik.http.routers.nopaque.rule=Host(`${NOPAQUE_DOMAIN}`)"
### </http> ###
### <https> ###
- "traefik.http.middlewares.nopaque-secure-header.headers.customrequestheaders.X-Forwarded-Proto=https"
- "traefik.http.routers.nopaque-secure.entrypoints=web-secure"
- "traefik.http.routers.nopaque-secure.middlewares=hsts-header@file, nopaque-secure-header"
- "traefik.http.routers.nopaque-secure.rule=Host(`${NOPAQUE_DOMAIN}`)"
- "traefik.http.routers.nopaque-secure.tls.options=intermediate@file"
### </https> ###
### <basicauth help="https://docs.traefik.io/middlewares/basicauth/"> ###
# - "traefik.http.middlewares.nopaque-basicauth.basicauth.users=<USERNAME>:<PASSWORD>"
# - "traefik.http.routers.nopaque.middlewares=nopaque-basicauth, nopaque-header, redirect-to-https@file"
# - "traefik.http.routers.nopaque-secure.middlewares=nopaque-basicauth, hsts-header@file, nopaque-secure-header"
### </basicauth> ###
networks:
- default
- reverse-proxy
volumes:
# Mount code as volumes
- "./web/app:/home/nopaque/app"
- "./web/migrations:/home/nopaque/migrations"
- "./web/tests:/home/nopaque/tests"
- "./web/config.py:/home/nopaque/config.py"
- "./web/docker-entrypoint.sh:/home/nopaque/docker-entrypoint.sh"
- "./web/nopaque.py:/home/nopaque/nopaque.py"
- "./web/requirements.txt:/home/nopaque/requirements.txt"
daemon:
volumes:
# Mount code as volumes
- "./daemon/logger:/home/nopaqued/logger"
- "./daemon/notify:/home/nopaqued/notify"
- "./daemon/tasks:/home/nopaqued/tasks"
- "./daemon/docker-entrypoint.sh:/home/nopaqued/docker-entrypoint.sh"
- "./daemon/nopaqued.py:/home/nopaqued/nopaqued.py"
- "./daemon/requirements.txt:/home/nopaqued/requirements.txt"

View File

@ -0,0 +1,30 @@
################################################################################
# Don't forget to set the NOPAQUE_NUM_PROXIES variable in your .env #
################################################################################
version: "3.5"
networks:
reverse-proxy:
external:
name: reverse-proxy
services:
nopaque:
labels:
- "traefik.docker.network=reverse-proxy"
- "traefik.enable=true"
### <http> ###
- "traefik.http.routers.nopaque.entrypoints=web"
- "traefik.http.routers.nopaque.middlewares=redirect-to-https@file"
- "traefik.http.routers.nopaque.rule=Host(`${NOPAQUE_DOMAIN:-localhost}`)"
### </http> ###
### <https> ###
- "traefik.http.routers.nopaque-secure.entrypoints=web-secure"
- "traefik.http.routers.nopaque-secure.middlewares=hsts-header@file"
- "traefik.http.routers.nopaque-secure.rule=Host(`${NOPAQUE_DOMAIN:-localhost}`)"
- "traefik.http.routers.nopaque-secure.tls.certresolver=<CERTRESOLVER>"
- "traefik.http.routers.nopaque-secure.tls.options=intermediate@file"
### </https> ###
networks:
- default
- reverse-proxy

View File

@ -1,49 +1,49 @@
version: "3.5"
volumes:
redis-trash1:
services:
web:
nopaque:
build:
args:
GID: ${GID}
UID: ${UID}
GID: ${HOST_GID}
UID: ${HOST_UID}
context: ./web
depends_on:
- db
- redis
- mq
env_file: .env
image: nopaque/web
restart: unless-stopped
volumes:
- "./logs:/home/nopaque/logs"
- "${NOPAQUE_STORAGE}:${NOPAQUE_STORAGE}"
daemon:
- "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}"
- "${HOST_NOPAQUE_LOG_FILE-./nopaque.log}:${NOPAQUE_LOG_FILE:-/home/nopaque/nopaque.log}"
nopaqued:
build:
args:
DOCKER_GID: ${DOCKER_GID}
GID: ${GID}
UID: ${UID}
DOCKER_GID: ${HOST_DOCKER_GID}
GID: ${HOST_GID}
UID: ${HOST_UID}
context: ./daemon
depends_on:
- db
- web
- nopaque
env_file: .env
image: nopaque/daemon
restart: unless-stopped
volumes:
- "/var/run/docker.sock:/var/run/docker.sock"
- "./logs:/home/nopaqued/logs"
- "${NOPAQUE_STORAGE}:${NOPAQUE_STORAGE}"
- "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}"
- "${HOST_NOPAQUE_DAEMON_LOG_FILE-./nopaqued.log}:${NOPAQUE_DAEMON_LOG_FILE:-/home/nopaqued/nopaqued.log}"
db:
env_file: .env
environment:
- POSTGRES_DB_NAME=${NOPAQUE_DB_NAME}
- POSTGRES_USER=${NOPAQUE_DB_USERNAME}
- POSTGRES_PASSWORD=${NOPAQUE_DB_PASSWORD}
image: postgres:11
restart: unless-stopped
volumes:
- "/srv/nopaque/db:/var/lib/postgresql/data"
redis:
- "${HOST_DB_DIR:-./db}:/var/lib/postgresql/data"
mq:
image: redis:6
restart: unless-stopped
volumes:
- "redis-trash1:/data"
- "${HOST_MQ_DIR:-./mq}:/data"

View File

View File

@ -1,3 +1,6 @@
# Docker related files
Dockerfile
.dockerignore
*.bak
# Packages
__pycache__

View File

@ -1,7 +1,7 @@
FROM python:3.6-slim-stretch
LABEL maintainer="inf_sfb1288@lists.uni-bielefeld.de"
LABEL authors="Patrick Jentsch <p.jentsch@uni-bielefeld.de>, Stephan Porada <sporada@uni-bielefeld.de>"
ARG UID
@ -18,7 +18,7 @@ RUN apt-get update \
build-essential \
libpq-dev \
wait-for-it \
&& rm -rf /var/lib/apt/lists/*
&& rm -r /var/lib/apt/lists/*
RUN groupadd --gid ${GID} --system nopaque \
@ -33,4 +33,4 @@ RUN python -m venv venv \
&& mkdir logs
ENTRYPOINT ["./docker-entrypoint.sh"]
ENTRYPOINT ["./boot.sh"]

View File

@ -1,15 +1,14 @@
from config import config
from config import Config
from flask import Flask
from flask_login import LoginManager
from flask_mail import Mail
from flask_paranoid import Paranoid
from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy
import logging
config = Config()
db = SQLAlchemy()
logger = logging.getLogger(__name__)
login_manager = LoginManager()
login_manager.login_view = 'auth.login'
mail = Mail()
@ -18,44 +17,36 @@ paranoid.redirect_view = '/'
socketio = SocketIO()
def create_app(config_name):
def create_app():
app = Flask(__name__)
app.config.from_object(config[config_name])
app.config.from_object(config)
config[config_name].init_app(app)
config.init_app(app)
db.init_app(app)
login_manager.init_app(app)
mail.init_app(app)
paranoid.init_app(app)
socketio.init_app(app, message_queue='redis://redis:6379/')
socketio.init_app(app, message_queue=config.SOCKETIO_MESSAGE_QUEUE_URI)
from . import events
from .admin import admin as admin_blueprint
app.register_blueprint(admin_blueprint, url_prefix='/admin')
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
from .content import content as content_blueprint
app.register_blueprint(content_blueprint, url_prefix='/content')
from .corpora import corpora as corpora_blueprint
app.register_blueprint(corpora_blueprint, url_prefix='/corpora')
from .jobs import jobs as jobs_blueprint
app.register_blueprint(jobs_blueprint, url_prefix='/jobs')
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .profile import profile as profile_blueprint
app.register_blueprint(profile_blueprint, url_prefix='/profile')
from .query_results import query_results as query_results_blueprint
app.register_blueprint(query_results_blueprint,
url_prefix='/query_results')
from .services import services as services_blueprint
app.register_blueprint(services_blueprint, url_prefix='/services')

View File

@ -65,7 +65,7 @@ def register():
username=registration_form.username.data)
db.session.add(user)
db.session.commit()
user_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
user_dir = os.path.join(current_app.config['DATA_DIR'],
str(user.id))
if os.path.exists(user_dir):
shutil.rmtree(user_dir)

View File

@ -9,8 +9,6 @@ import cqi
import math
from datetime import datetime
import time
from app import logger
'''
' A dictionary containing lists of, with corpus ids associated, Socket.IO
@ -41,7 +39,8 @@ def corpus_analysis_get_meta_data(corpus_id):
metadata['corpus_name'] = db_corpus.title
metadata['corpus_description'] = db_corpus.description
metadata['corpus_creation_date'] = db_corpus.creation_date.isoformat()
metadata['corpus_last_edited_date'] = db_corpus.last_edited_date.isoformat()
metadata['corpus_last_edited_date'] = \
db_corpus.last_edited_date.isoformat()
client = corpus_analysis_clients.get(request.sid)
if client is None:
response = {'code': 424, 'desc': 'No client found for this session',
@ -61,18 +60,20 @@ def corpus_analysis_get_meta_data(corpus_id):
metadata['corpus_size_tokens'] = client_corpus.attrs['size']
text_attr = client_corpus.structural_attributes.get('text')
struct_attrs = client_corpus.structural_attributes.list(filters={'part_of': text_attr})
struct_attrs = client_corpus.structural_attributes.list(
filters={'part_of': text_attr})
text_ids = range(0, (text_attr.attrs['size']))
texts_metadata = {}
for text_id in text_ids:
texts_metadata[text_id] = {}
for struct_attr in struct_attrs:
texts_metadata[text_id][struct_attr.attrs['name'][(len(text_attr.attrs['name']) + 1):]] = struct_attr.values_by_ids(list(range(struct_attr.attrs['size'])))[text_id]
texts_metadata[text_id][struct_attr.attrs['name'][(len(text_attr.attrs['name']) + 1):]] = struct_attr.values_by_ids(list(range(struct_attr.attrs['size'])))[text_id] # noqa
metadata['corpus_all_texts'] = texts_metadata
metadata['corpus_analysis_date'] = datetime.utcnow().isoformat()
metadata['corpus_cqi_py_protocol_version'] = client.api.version
metadata['corpus_cqi_py_package_version'] = cqi.__version__
metadata['corpus_cqpserver_version'] = 'CQPserver v3.4.22' # TODO: make this dynamically
# TODO: make this dynamically
metadata['corpus_cqpserver_version'] = 'CQPserver v3.4.22'
# write some metadata to the db
db_corpus.current_nr_of_tokens = metadata['corpus_size_tokens']
@ -133,7 +134,7 @@ def corpus_analysis_query(query):
if (results.attrs['size'] == 0):
progress = 100
else:
progress = ((chunk_start + chunk_size) / results.attrs['size']) * 100
progress = ((chunk_start + chunk_size) / results.attrs['size']) * 100 # noqa
progress = min(100, int(math.ceil(progress)))
response = {'code': 200, 'desc': None, 'msg': 'OK',
'payload': {'chunk': chunk, 'progress': progress}}
@ -202,7 +203,9 @@ def corpus_analysis_get_match_with_full_context(payload):
'payload': payload,
'type': type,
'data_indexes': data_indexes}
socketio.emit('corpus_analysis_get_match_with_full_context', response, room=request.sid)
socketio.emit('corpus_analysis_get_match_with_full_context',
response,
room=request.sid)
client.status = 'ready'

View File

@ -21,7 +21,7 @@ def add_corpus():
status='unprepared', title=add_corpus_form.title.data)
db.session.add(corpus)
db.session.commit()
dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
dir = os.path.join(current_app.config['DATA_DIR'],
str(corpus.user_id), 'corpora', str(corpus.id))
try:
os.makedirs(dir)
@ -109,7 +109,7 @@ def add_corpus_file(corpus_id):
# Save the file
dir = os.path.join(str(corpus.user_id), 'corpora', str(corpus.id))
add_corpus_file_form.file.data.save(
os.path.join(current_app.config['NOPAQUE_STORAGE'], dir,
os.path.join(current_app.config['DATA_DIR'], dir,
add_corpus_file_form.file.data.filename))
corpus_file = CorpusFile(
address=add_corpus_file_form.address.data,
@ -163,7 +163,7 @@ def download_corpus_file(corpus_id, corpus_file_id):
if not (corpus_file.corpus.creator == current_user
or current_user.is_administrator()):
abort(403)
dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
dir = os.path.join(current_app.config['DATA_DIR'],
corpus_file.dir)
return send_from_directory(as_attachment=True, directory=dir,
filename=corpus_file.filename)

View File

@ -1,15 +1,11 @@
from flask import current_app, render_template
from flask import render_template
from flask_mail import Message
from . import mail
from .decorators import background
def create_message(recipient, subject, template, **kwargs):
app = current_app._get_current_object()
sender = app.config['NOPAQUE_MAIL_SENDER']
subject_prefix = app.config['NOPAQUE_MAIL_SUBJECT_PREFIX']
msg = Message('{} {}'.format(subject_prefix, subject),
recipients=[recipient], sender=sender)
msg = Message('[nopaque] {}'.format(subject), recipients=[recipient])
msg.body = render_template('{}.txt.j2'.format(template), **kwargs)
msg.html = render_template('{}.html.j2'.format(template), **kwargs)
return msg

View File

@ -44,7 +44,7 @@ def download_job_input(job_id, job_input_id):
if not (job_input.job.creator == current_user
or current_user.is_administrator()):
abort(403)
dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
dir = os.path.join(current_app.config['DATA_DIR'],
job_input.dir)
return send_from_directory(as_attachment=True, directory=dir,
filename=job_input.filename)
@ -72,7 +72,7 @@ def download_job_result(job_id, job_result_id):
if not (job_result.job.creator == current_user
or current_user.is_administrator()):
abort(403)
dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
dir = os.path.join(current_app.config['DATA_DIR'],
job_result.dir)
return send_from_directory(as_attachment=True, directory=dir,
filename=job_result.filename)

View File

@ -1,12 +0,0 @@
from flask_wtf import FlaskForm
from wtforms import DecimalField, StringField, SubmitField, TextAreaField
from wtforms.validators import DataRequired, Email, Length, NumberRange
class FeedbackForm(FlaskForm):
    """Form for collecting user feedback on nopaque.

    Fields: a contact email address, an optional free-text feedback
    message, and a numeric 1-10 rating.
    """

    # Required; must be a syntactically valid email address.
    email = StringField('Email', validators=[DataRequired(), Email()])
    # Optional free text, limited to 255 characters.
    feedback = TextAreaField('Feedback', validators=[Length(0, 255)])
    # Required rating; NumberRange restricts it to 1..10 inclusive.
    like_range = DecimalField('How would you rate nopaque?',
                              validators=[DataRequired(),
                                          NumberRange(min=1, max=10)])
    submit = SubmitField('Send feedback')

View File

@ -1,8 +1,6 @@
from flask import flash, redirect, render_template, url_for
from flask_login import login_required, login_user
from . import main
from .forms import FeedbackForm
from .. import logger
from ..auth.forms import LoginForm
from ..models import User
@ -28,18 +26,6 @@ def dashboard():
return render_template('main/dashboard.html.j2', title='Dashboard')
@main.route('/feedback', methods=['GET', 'POST'])
@login_required
def feedback():
feedback_form = FeedbackForm(prefix='feedback-form')
if feedback_form.validate_on_submit():
logger.warning(feedback_form.email)
logger.warning(feedback_form.feedback)
logger.warning(feedback_form.like_range)
return render_template('main/feedback.html.j2',
feedback_form=feedback_form, title='Feedback')
@main.route('/poster', methods=['GET', 'POST'])
def poster():
login_form = LoginForm(prefix='login-form')

View File

@ -166,7 +166,7 @@ class User(UserMixin, db.Model):
def __init__(self, **kwargs):
super(User, self).__init__(**kwargs)
if self.role is None:
if self.email == current_app.config['NOPAQUE_ADMIN']:
if self.email == current_app.config['ADMIN_EMAIL_ADRESS']:
self.role = Role.query.filter_by(name='Administrator').first()
if self.role is None:
self.role = Role.query.filter_by(default=True).first()
@ -251,7 +251,7 @@ class User(UserMixin, db.Model):
'''
Delete the user and its corpora and jobs from database and filesystem.
'''
user_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
user_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.id))
shutil.rmtree(user_dir, ignore_errors=True)
db.session.delete(self)
@ -383,7 +383,7 @@ class Job(db.Model):
db.session.commit()
sleep(1)
db.session.refresh(self)
job_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
job_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id),
'jobs',
str(self.id))
@ -397,7 +397,7 @@ class Job(db.Model):
if self.status != 'failed':
raise Exception('Could not restart job: status is not "failed"')
job_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
job_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id),
'jobs',
str(self.id))
@ -508,7 +508,7 @@ class CorpusFile(db.Model):
title = db.Column(db.String(255))
def delete(self):
corpus_file_path = os.path.join(current_app.config['NOPAQUE_STORAGE'],
corpus_file_path = os.path.join(current_app.config['DATA_DIR'],
str(self.corpus.user_id),
'corpora',
str(self.corpus_id),
@ -570,7 +570,7 @@ class Corpus(db.Model):
'files': {file.id: file.to_dict() for file in self.files}}
def build(self):
corpus_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
corpus_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id),
'corpora',
str(self.id))
@ -606,7 +606,7 @@ class Corpus(db.Model):
self.status = 'submitted'
def delete(self):
corpus_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
corpus_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id),
'corpora',
str(self.id))
@ -636,7 +636,7 @@ class QueryResult(db.Model):
title = db.Column(db.String(32))
def delete(self):
query_result_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
query_result_dir = os.path.join(current_app.config['DATA_DIR'],
str(self.user_id),
'query_results',
str(self.id))

View File

@ -31,7 +31,7 @@ def add_query_result():
db.session.add(query_result)
db.session.commit()
# create paths to save the uploaded json file
query_result_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
query_result_dir = os.path.join(current_app.config['DATA_DIR'],
str(current_user.id),
'query_results',
str(query_result.id))
@ -106,7 +106,7 @@ def inspect_query_result(query_result_id):
prefix='inspect-display-options-form'
)
query_result_file_path = os.path.join(
current_app.config['NOPAQUE_STORAGE'],
current_app.config['DATA_DIR'],
str(current_user.id),
'query_results',
str(query_result.id),
@ -141,7 +141,7 @@ def download_query_result(query_result_id):
if not (query_result.creator == current_user
or current_user.is_administrator()):
abort(403)
query_result_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
query_result_dir = os.path.join(current_app.config['DATA_DIR'],
str(current_user.id),
'query_results',
str(query_result.id))

View File

@ -55,7 +55,7 @@ def service(service):
db.session.add(job)
db.session.commit()
relative_dir = os.path.join(str(job.user_id), 'jobs', str(job.id))
absolut_dir = os.path.join(current_app.config['NOPAQUE_STORAGE'],
absolut_dir = os.path.join(current_app.config['DATA_DIR'],
relative_dir)
try:
os.makedirs(absolut_dir)

View File

@ -1,35 +0,0 @@
{% extends "nopaque.html.j2" %}
{% block page_content %}
<div class="col s12">
<div class="card">
<form method="POST">
{{ feedback_form.hidden_tag() }}
<div class="card-content">
<p class="range-field">
{{ feedback_form.like_range.label }}
{{ feedback_form.like_range(class='validate', type='range', min=1, max=10) }}
</p>
<div class="input-field">
<i class="material-icons prefix">email</i>
{{ feedback_form.email(class='validate', type='email') }}
{{ feedback_form.email.label }}
{% for error in feedback_form.email.errors %}
<span class="helper-text red-text">{{ error }}</span>
{% endfor %}
</div>
<div class="input-field">
<i class="material-icons prefix">mode_edit</i>
{{ feedback_form.feedback(class='materialize-textarea', data_length=255) }}
{{ feedback_form.feedback.label }}
</div>
</div>
<div class="card-action right-align">
{{ M.render_field(feedback_form.submit, material_icon='send') }}
</div>
</form>
</div>
</div>
{% endblock %}

View File

@ -1,202 +0,0 @@
{# Landing page: hero section, three content sections separated by parallax
   images, and an inline log-in card. Requesting the page with "?print=True"
   switches it into a scaled-up print/poster layout. #}
{% extends "nopaque.html.j2" %}
{% set parallax = True %}
{% block page_content %}
<style>
{% if request.args.get('print') == 'True' %}
/* Print mode: scale the whole page up for a DIN A0 poster at 150 dpi. */
html {
/* DIN A0 at 150 dpi */
width: 4697px; /* fixed: was "4697" without a unit — invalid CSS, declaration was ignored */
height: 7022px;
}
div.navbar-fixed {
transform: scale(3);
transform-origin: 0 0;
}
footer.page-footer {
transform: scale(3);
transform-origin: 0 0;
margin-top: 5496px;
}
.print-transform {
transform: scale(3);
transform-origin: 0 0;
}
{% endif %}
.parallax-container {
height: 321px;
}
</style>
<div class="print-transform">
{# Hero: title, tagline, authors and the SFB 1288 project blurb. #}
<div class="section">
<div class="row container">
<div class="col s12 m5">
<h1>nopaque</h1>
<p>From text to data to analysis</p>
<p class="light">Patrick Jentsch, Stephan Porada and Helene Schlicht</p>
</div>
<div class="col s12 m7">
<p>&nbsp;</p>
<div class="card">
<div class="card-content">
<div class="row">
<div class="col s3">
<p>&nbsp;</p>
<img class="responsive-img" src="https://www.uni-bielefeld.de/sfb1288/images/Logo_SFB1288_DE_300dpi.png">
</div>
<div class="col s9">
<p>nopaque is a web application that helps to convert heterogeneous textual source material into standard-compliant research data for subsequent analysis. nopaque is designed to accompany your research process.</p>
<p>The web application is developed within the DFG-funded Collaborative Research Center (SFB) 1288 "Practices of Comparison" by the subproject INF "Data Infrastructure and Digital Humanities".</p>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="parallax-container">
<img src="{{ url_for('static', filename='images/parallax_hq/books_antique_book_old.jpg') }}" width="100%" alt="" style="margin-top: -200px;">
</div>
{# Section 1: why to use nopaque — four selling points. #}
<div class="section white scrollspy" id="information">
<div class="row container">
<div class="col s12">
<div class="row">
<div class="col s12">
<h3>Why you should use nopaque</h3>
<p>nopaque is a custom-built web application for researchers who want to get out more of their images and texts without having to bother about the technical side of things. You can focus on what really interests you, nopaque does the rest.</p>
<p>nopaque's utilization of container virtualization guarantees high interoperability, reusability and reproducibility of research results. All processing steps are carried out in containers created on demand, based on static images with fixed software versions including all dependencies.</p>
</div>
<div class="col s12">
<div class="row">
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">flash_on</i>
<p>Speeds up your work</p>
<p class="light">All tools provided by nopaque are carefully selected to provide a complete tool suite without being held up by compatibility issues.</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">cloud</i>
<p>Cloud infrastructure</p>
<p class="light">All computational work is processed within nopaque's cloud infrastructure. You don't need to install any software. Great, right?</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">group</i>
<p>User friendly</p>
<p class="light">You can start right away without having to read mile-long manuals. All services come with default settings that make it easy for you to just get going. Also great, right?</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">settings</i>
<p>Meshing processes</p>
<p class="light">No matter where you step in, nopaque facilitates and accompanies your research. Its workflow perfectly ties in with your research process.</p>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="parallax-container">
<img src="{{ url_for('static', filename='images/parallax_hq/concept_document_focus_letter.jpg') }}" width="100%" alt="" style="margin-top: -350px;">
</div>
{# Section 2: the four service stages (file setup, OCR, NLP, corpus analysis). #}
<div class="section white scrollspy" id="services">
<div class="row container">
<div class="col s12">
<div class="row">
<div class="col s12">
<h3>What nopaque can do for you</h3>
<p>All services and processes are logically linked and built upon each other. You can follow them step by step or directly choose the one that suits your needs best. And while the process is computed in nopaque's cloud infrastructure, you can just keep working.</p>
</div>
<div class="col s12">
<br class="hide-on-small-only">
<div class="row">
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">burst_mode</i>
<p>File setup</p>
<p class="light">Digital copies of text based research data (books, letters, etc.) often comprise various files and formats. nopaque converts and merges those files to facilitate further processing and the application of other services.</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">find_in_page</i>
<p>Optical Character Recognition</p>
<p class="light">nopaque converts your image data like photos or scans into text data through OCR making it machine readable. This step enables you to proceed with further computational analysis of your documents.</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">format_textdirection_l_to_r</i>
<p>Natural Language Processing</p>
<p class="light">By means of computational linguistic data processing (tokenization, lemmatization, part-of-speech tagging and named-entity recognition) nopaque extracts additional information from your text.</p>
</div>
<div class="col s12 m6 l3 center-align">
<i class="large material-icons" style="color: #ee6e73;">search</i>
<p>Corpus analysis</p>
<p class="light">nopaque lets you create and upload as many text corpora as you want. It makes use of CQP Query Language, which allows for complex search requests with the aid of metadata and NLP tags.</p>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="parallax-container">
<img src="{{ url_for('static', filename='images/parallax_hq/text_data_wide.png') }}" width="100%" alt="" style="margin-top: -450px;">
</div>
{# Section 3: log-in card. The stand-alone registration teaser is kept but
   commented out (HTML comment — the url_for() inside is still evaluated). #}
<div class="section white scrollspy" id="registration-and-log-in">
<div class="row container">
<div class="col s12">
<div class="row">
<!--
<div class="col s12 m4">
<h3>Registration and Log in</h3>
<p>Want to boost your research and get going? nopaque is free and no download is needed. Register now!</p>
<a class="btn waves-effect waves-light" href="{{ url_for('auth.register') }}"><i class="material-icons left">person_add</i>Register</a>
</div>-->
<div class="col s12">
<div class="card">
<form method="POST">
{{ login_form.hidden_tag() }}
<div class="card-content">
<span class="card-title">Registration and Log in</span>
<div class="input-field">
<i class="material-icons prefix">person</i>
{{ login_form.user(class='validate') }}
{{ login_form.user.label }}
{% for error in login_form.user.errors %}
<span class="helper-text red-text">{{ error }}</span>
{% endfor %}
</div>
<div class="input-field">
<i class="material-icons prefix">vpn_key</i>
{{ login_form.password(class='validate') }}
{{ login_form.password.label }}
{% for error in login_form.password.errors %}
<span class="helper-text red-text">{{ error }}</span>
{% endfor %}
</div>
<div class="row" style="margin-bottom: 0;">
<div class="col s6 left-align">
<a href="{{ url_for('auth.reset_password_request') }}">Forgot your password?</a>
|
{# NOTE(review): "No account yet?" points at reset_password_request, the
   same target as "Forgot your password?" — presumably it should point at
   auth.register (the register block above is commented out). Confirm the
   intended registration policy before changing it. #}
<a href="{{ url_for('auth.reset_password_request') }}">No account yet?</a>
</div>
<div class="col s6 right-align">
{{ materialize.submit_button(login_form.submit) }}
</div>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
{% endblock %}

21
web/boot.sh Executable file
View File

@ -0,0 +1,21 @@
#!/bin/bash
# nopaque web container entrypoint: block until the backing services accept
# connections, then either launch the application (no arguments) or proxy a
# flask management command.

# Wait for the database (default PostgreSQL port 5432) and the message queue.
# --timeout=0 waits indefinitely; --strict exits non-zero on failure.
echo "Waiting for db..."
wait-for-it "${NOPAQUE_DB_HOST}:${NOPAQUE_DB_PORT:-5432}" --strict --timeout=0
echo "Waiting for mq..."
# NOTE(review): unlike the db port, NOPAQUE_MQ_PORT has no default here —
# confirm it is always set in the environment (see .env.tpl).
wait-for-it "${NOPAQUE_MQ_HOST}:${NOPAQUE_MQ_PORT}" --strict --timeout=0

# Activate the virtualenv baked into the image.
source venv/bin/activate
if [ "$#" -eq 0 ]; then
    # Default mode: run deployment tasks (migrations etc.), then the server.
    flask deploy
    python nopaque.py
elif [[ "$1" == "flask" ]]; then
    # "flask ..." passthrough. "$@" forwards every argument intact; the
    # previous unquoted ${@:1} was subject to word splitting and globbing,
    # so arguments containing spaces or wildcards were mangled.
    exec "$@"
else
    echo "$0 [COMMAND]"
    echo ""
    echo "nopaque startup script"
    echo ""
    echo "Management Commands:"
    echo " flask"
fi

View File

@ -1,85 +1,97 @@
from werkzeug.middleware.proxy_fix import ProxyFix
import os
import logging
import os
root_dir = os.path.abspath(os.path.dirname(__file__))
DEFAULT_DATA_DIR = os.path.join('/mnt/data')
DEFAULT_DB_PORT = '5432'
DEFAULT_DEBUG = 'False'
DEFAULT_LOG_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
DEFAULT_LOG_FILE = os.path.join(root_dir, 'nopaque.log')
DEFAULT_LOG_FORMAT = ('[%(asctime)s] %(levelname)s in %(pathname)s '
'(function: %(funcName)s, line: %(lineno)d): '
'%(message)s')
DEFAULT_LOG_LEVEL = 'ERROR'
DEFAULT_SMTP_USE_SSL = 'False'
DEFAULT_SMTP_USE_TLS = 'False'
DEFAULT_NUM_PROXIES = '0'
DEFAULT_PROTOCOL = 'http'
DEFAULT_RESSOURCES_PER_PAGE = '5'
DEFAULT_USERS_PER_PAGE = '10'
DEFAULT_SECRET_KEY = 'hard to guess string'
class Config:
''' ### Flask ### '''
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
''' ### Flask-Mail ### '''
MAIL_SERVER = os.environ.get('MAIL_SERVER')
MAIL_PORT = int(os.environ.get('MAIL_PORT'))
MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS').lower() == 'true'
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
''' ### Flask-SQLAlchemy ### '''
SQLALCHEMY_DATABASE_URI = 'postgresql://{}:{}@db/{}'.format(
os.environ.get('POSTGRES_USER'),
os.environ.get('POSTGRES_PASSWORD'),
os.environ.get('POSTGRES_DB_NAME'))
''' ### Database ### '''
DB_HOST = os.environ.get('NOPAQUE_DB_HOST')
DB_NAME = os.environ.get('NOPAQUE_DB_NAME')
DB_PASSWORD = os.environ.get('NOPAQUE_DB_PASSWORD')
DB_PORT = os.environ.get('NOPAQUE_DB_PORT', DEFAULT_DB_PORT)
DB_USERNAME = os.environ.get('NOPAQUE_DB_USERNAME')
SQLALCHEMY_DATABASE_URI = 'postgresql://{}:{}@{}:{}/{}'.format(
DB_USERNAME, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME)
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = False
''' ### nopaque ### '''
NOPAQUE_ADMIN = os.environ.get('NOPAQUE_ADMIN')
NOPAQUE_CONTACT = os.environ.get('NOPAQUE_CONTACT')
NOPAQUE_MAIL_SENDER = os.environ.get('NOPAQUE_MAIL_SENDER')
NOPAQUE_MAIL_SUBJECT_PREFIX = '[nopaque]'
NOPAQUE_PROTOCOL = os.environ.get('NOPAQUE_PROTOCOL')
NOPAQUE_STORAGE = os.environ.get('NOPAQUE_STORAGE')
''' ### Email ### '''
MAIL_DEFAULT_SENDER = os.environ.get('NOPAQUE_SMTP_DEFAULT_SENDER')
MAIL_PASSWORD = os.environ.get('NOPAQUE_SMTP_PASSWORD')
MAIL_PORT = os.environ.get('NOPAQUE_SMTP_PORT')
MAIL_SERVER = os.environ.get('NOPAQUE_SMTP_SERVER')
MAIL_USERNAME = os.environ.get('NOPAQUE_SMTP_USERNAME')
MAIL_USE_SSL = os.environ.get('NOPAQUE_SMTP_USE_SSL',
DEFAULT_SMTP_USE_SSL).lower() == 'true'
MAIL_USE_TLS = os.environ.get('NOPAQUE_SMTP_USE_TLS',
DEFAULT_SMTP_USE_TLS).lower() == 'true'
os.makedirs('logs', exist_ok=True)
logging.basicConfig(filename='logs/nopaque.log',
format='[%(asctime)s] %(levelname)s in '
'%(pathname)s:%(lineno)d - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S', filemode='w')
''' ### Security enhancements ### '''
if NOPAQUE_PROTOCOL == 'https':
''' ### Flask ### '''
SESSION_COOKIE_SECURE = True
''' ### Flask-Login ### '''
''' ### General ### '''
ADMIN_EMAIL_ADRESS = os.environ.get('NOPAQUE_ADMIN_EMAIL_ADRESS')
CONTACT_EMAIL_ADRESS = os.environ.get('NOPAQUE_CONTACT_EMAIL_ADRESS')
DATA_DIR = os.environ.get('NOPAQUE_DATA_DIR', DEFAULT_DATA_DIR)
DEBUG = os.environ.get('NOPAQUE_DEBUG', DEFAULT_DEBUG).lower() == 'true'
NUM_PROXIES = int(os.environ.get('NOPAQUE_NUM_PROXIES',
DEFAULT_NUM_PROXIES))
PROTOCOL = os.environ.get('NOPAQUE_PROTOCOL', DEFAULT_PROTOCOL)
RESSOURCES_PER_PAGE = int(os.environ.get('NOPAQUE_RESSOURCES_PER_PAGE',
DEFAULT_RESSOURCES_PER_PAGE))
SECRET_KEY = os.environ.get('NOPAQUE_SECRET_KEY', DEFAULT_SECRET_KEY)
USERS_PER_PAGE = int(os.environ.get('NOPAQUE_USERS_PER_PAGE',
DEFAULT_USERS_PER_PAGE))
if PROTOCOL == 'https':
REMEMBER_COOKIE_HTTPONLY = True
REMEMBER_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
@staticmethod
def init_app(app):
proxy_fix_kwargs = {'x_for': 1, 'x_host': 1, 'x_port': 1, 'x_proto': 1}
app.wsgi_app = ProxyFix(app.wsgi_app, **proxy_fix_kwargs)
''' ### Logging ### '''
LOG_DATE_FORMAT = os.environ.get('NOPAQUE_LOG_DATE_FORMAT',
DEFAULT_LOG_DATE_FORMAT)
LOG_FILE = os.environ.get('NOPAQUE_LOG_FILE', DEFAULT_LOG_FILE)
LOG_FORMAT = os.environ.get('NOPAQUE_LOG_FORMAT', DEFAULT_LOG_FORMAT)
LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL', DEFAULT_LOG_LEVEL)
''' ### Message queue ### '''
MQ_HOST = os.environ.get('NOPAQUE_MQ_HOST')
MQ_PORT = os.environ.get('NOPAQUE_MQ_PORT')
MQ_TYPE = os.environ.get('NOPAQUE_MQ_TYPE')
SOCKETIO_MESSAGE_QUEUE_URI = \
'{}://{}:{}/'.format(MQ_TYPE, MQ_HOST, MQ_PORT)
class DevelopmentConfig(Config):
''' ### Flask ### '''
DEBUG = True
''' ### nopaque ### '''
NOPAQUE_LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL') or 'DEBUG'
logging.basicConfig(level=NOPAQUE_LOG_LEVEL)
class TestingConfig(Config):
''' ### Flask ### '''
TESTING = True
''' ### Flask-SQLAlchemy ### '''
SQLALCHEMY_DATABASE_URI = 'sqlite://'
''' ### Flask-WTF ### '''
WTF_CSRF_ENABLED = False
class ProductionConfig(Config):
''' ### nopaque ### '''
NOPAQUE_LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL') or 'ERROR'
logging.basicConfig(level=NOPAQUE_LOG_LEVEL)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig,
}
def init_app(self, app):
# Configure logging according to the corresponding (LOG_*) config
# entries
logging.basicConfig(datefmt=self.LOG_DATE_FORMAT,
filename=self.LOG_FILE,
format=self.LOG_FORMAT,
level=self.LOG_LEVEL)
# Apply the ProxyFix middleware if nopaque is running behind reverse
# proxies. (NUM_PROXIES indicates the number of reverse proxies running
# in front of nopaque)
if self.NUM_PROXIES > 0:
app.wsgi_app = ProxyFix(app.wsgi_app,
x_for=self.NUM_PROXIES,
x_host=self.NUM_PROXIES,
x_port=self.NUM_PROXIES,
x_proto=self.NUM_PROXIES)

View File

@ -1,16 +0,0 @@
#!/bin/bash
# Legacy nopaque entrypoint: block until the database and redis accept
# connections, then start the application or forward a flask command.
echo "Waiting for db..."
wait-for-it db:5432 --strict --timeout=0
echo "Waiting for redis..."
wait-for-it redis:6379 --strict --timeout=0
# Activate the virtualenv baked into the image.
source venv/bin/activate
if [ $# -eq 0 ]; then
# No arguments: run deployment tasks, then the server (blocks).
flask deploy
python nopaque.py
elif [ $1 == "flask" ]; then
# "flask ..." passthrough: drop the leading word, forward the rest.
# NOTE(review): $1 and ${@:2} are unquoted — arguments containing
# whitespace get split; confirm callers never pass such values.
flask ${@:2}
else
echo "$0 [flask [options]]"
fi

View File

@ -5,9 +5,8 @@ from app.models import (Corpus, CorpusFile, Job, JobInput, JobResult,
NotificationData, NotificationEmailData, QueryResult,
Role, User)
from flask_migrate import Migrate, upgrade
import os
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
app = create_app()
migrate = Migrate(app, db, compare_type=True)

View File

@ -17,6 +17,3 @@ class BasicsTestCase(unittest.TestCase):
def test_app_exists(self):
    """The app factory must have produced an application bound to current_app."""
    # assertIsNotNone has identical pass/fail behavior to
    # assertFalse(current_app is None) but reports the offending value
    # on failure.
    self.assertIsNotNone(current_app)
def test_app_is_testing(self):
    # The app must have been created with the testing configuration
    # (TestingConfig sets TESTING = True); KeyError here would mean the
    # config was not applied at all.
    self.assertTrue(current_app.config['TESTING'])