Change directory structure (move ./nopaque/* to ./)
app/__init__.py (Normal file, 54 additions)
@@ -0,0 +1,54 @@
from config import config
from flask import Flask
from flask_login import LoginManager
from flask_mail import Mail
from flask_paranoid import Paranoid
from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy
import flask_assets


assets = flask_assets.Environment()
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.login_view = 'auth.login'
mail = Mail()
paranoid = Paranoid()
paranoid.redirect_view = '/'
socketio = SocketIO()


def create_app(config_name):
    app = Flask(__name__)
    app.config.from_object(config[config_name])

    assets.init_app(app)
    config[config_name].init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    mail.init_app(app)
    paranoid.init_app(app)
    socketio.init_app(
        app, message_queue=app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI'])

    with app.app_context():
        from . import events
        from .admin import admin as admin_blueprint
        from .auth import auth as auth_blueprint
        from .corpora import corpora as corpora_blueprint
        from .errors import errors as errors_blueprint
        from .jobs import jobs as jobs_blueprint
        from .main import main as main_blueprint
        from .services import services as services_blueprint
        from .settings import settings as settings_blueprint

        app.register_blueprint(admin_blueprint, url_prefix='/admin')
        app.register_blueprint(auth_blueprint, url_prefix='/auth')
        app.register_blueprint(corpora_blueprint, url_prefix='/corpora')
        app.register_blueprint(errors_blueprint)
        app.register_blueprint(jobs_blueprint, url_prefix='/jobs')
        app.register_blueprint(main_blueprint)
        app.register_blueprint(services_blueprint, url_prefix='/services')
        app.register_blueprint(settings_blueprint, url_prefix='/settings')

    return app
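
For orientation, create_app() above is an application factory; a minimal sketch of how it would typically be driven from an entry-point module follows. The module name, the FLASK_CONFIG variable and the 'default' config key are assumptions for illustration and are not part of this diff.

# Hypothetical entry point (not part of this diff).
import os

from app import create_app, socketio

# 'default' is assumed to be a key of config.config.
app = create_app(os.environ.get('FLASK_CONFIG', 'default'))

if __name__ == '__main__':
    # socketio.run() wraps app.run() so WebSocket traffic is served as well.
    socketio.run(app)
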
app/admin/__init__.py (Normal file, 5 additions)
@@ -0,0 +1,5 @@
from flask import Blueprint


admin = Blueprint('admin', __name__)
from . import views
app/admin/forms.py (Normal file, 14 additions)
@@ -0,0 +1,14 @@
from flask_login import current_user
from wtforms import BooleanField, SelectField
from ..models import Role
from ..settings.forms import EditGeneralSettingsForm


class EditGeneralSettingsAdminForm(EditGeneralSettingsForm):
    confirmed = BooleanField('Confirmed')
    role = SelectField('Role', coerce=int)

    def __init__(self, user=current_user, *args, **kwargs):
        super().__init__(*args, user=user, **kwargs)
        self.role.choices = [(role.id, role.name)
                             for role in Role.query.order_by(Role.name).all()]
app/admin/views.py (Normal file, 65 additions)
@@ -0,0 +1,65 @@
from flask import flash, redirect, render_template, url_for
from flask_login import login_required
from . import admin
from .forms import EditGeneralSettingsAdminForm
from .. import db
from ..decorators import admin_required
from ..models import Role, User
from ..settings import tasks as settings_tasks


@admin.route('/')
@login_required
@admin_required
def index():
    return redirect(url_for('.users'))


@admin.route('/users')
@login_required
@admin_required
def users():
    # users = [user.to_dict() for user in User.query.all()]
    users = {user.id: user.to_dict() for user in User.query.all()}
    return render_template('admin/users.html.j2', title='Users', users=users)


@admin.route('/users/<int:user_id>')
@login_required
@admin_required
def user(user_id):
    user = User.query.get_or_404(user_id)
    return render_template('admin/user.html.j2', title='User', user=user)


@admin.route('/users/<int:user_id>/delete')
@login_required
@admin_required
def delete_user(user_id):
    settings_tasks.delete_user(user_id)
    flash('User has been marked for deletion!')
    return redirect(url_for('.users'))


@admin.route('/users/<int:user_id>/edit', methods=['GET', 'POST'])  # noqa
@login_required
@admin_required
def edit_user(user_id):
    user = User.query.get_or_404(user_id)
    form = EditGeneralSettingsAdminForm(user=user)
    if form.validate_on_submit():
        user.setting_dark_mode = form.dark_mode.data
        user.email = form.email.data
        user.username = form.username.data
        user.confirmed = form.confirmed.data
        user.role = Role.query.get(form.role.data)
        db.session.commit()
        flash('Settings have been updated.')
        return redirect(url_for('.edit_user', user_id=user.id))
    form.confirmed.data = user.confirmed
    form.dark_mode.data = user.setting_dark_mode
    form.email.data = user.email
    form.role.data = user.role_id
    form.username.data = user.username
    return render_template('admin/edit_user.html.j2', form=form,
                           title='Edit user', user=user)
app/auth/__init__.py (Normal file, 5 additions)
@@ -0,0 +1,5 @@
from flask import Blueprint


auth = Blueprint('auth', __name__)
from . import views
app/auth/forms.py (Normal file, 62 additions)
@@ -0,0 +1,62 @@
from flask import current_app
from ..models import User
from flask_wtf import FlaskForm
from wtforms import (BooleanField, PasswordField, StringField, SubmitField,
                     ValidationError)
from wtforms.validators import DataRequired, Email, EqualTo, Length, Regexp


class LoginForm(FlaskForm):
    user = StringField('Email or username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Keep me logged in')
    submit = SubmitField('Log In')


class RegistrationForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Email()])
    username = StringField(
        'Username',
        validators=[DataRequired(), Length(1, 64),
                    Regexp(current_app.config['NOPAQUE_USERNAME_REGEX'],
                           message='Usernames must have only letters, numbers,'
                                   ' dots or underscores')]
    )
    password = PasswordField(
        'Password',
        validators=[DataRequired(), EqualTo('password_confirmation',
                                            message='Passwords must match.')]
    )
    password_confirmation = PasswordField(
        'Password confirmation',
        validators=[DataRequired(), EqualTo('password',
                                            message='Passwords must match.')]
    )
    submit = SubmitField('Register')

    def validate_email(self, field):
        if User.query.filter_by(email=field.data.lower()).first():
            raise ValidationError('Email already registered.')

    def validate_username(self, field):
        if User.query.filter_by(username=field.data).first():
            raise ValidationError('Username already in use.')


class ResetPasswordForm(FlaskForm):
    password = PasswordField(
        'New password',
        validators=[DataRequired(), EqualTo('password_confirmation',
                                            message='Passwords must match.')]
    )
    password_confirmation = PasswordField(
        'Password confirmation',
        validators=[DataRequired(),
                    EqualTo('password', message='Passwords must match.')]
    )
    submit = SubmitField('Reset Password')


class ResetPasswordRequestForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Email()])
    submit = SubmitField('Reset Password')
app/auth/views.py (Normal file, 152 additions)
@@ -0,0 +1,152 @@
from datetime import datetime
from flask import abort, flash, redirect, render_template, request, url_for
from flask_login import current_user, login_user, login_required, logout_user
from . import auth
from .forms import (LoginForm, ResetPasswordForm, ResetPasswordRequestForm,
                    RegistrationForm)
from .. import db
from ..email import create_message, send
from ..models import User
import logging
import os


@auth.before_app_request
def before_request():
    """
    Checks if a user is unconfirmed when visiting specific pages. Redirects to
    the unconfirmed view if the user is unconfirmed.
    """
    if current_user.is_authenticated:
        current_user.last_seen = datetime.utcnow()
        db.session.commit()
        if (not current_user.confirmed
                and request.endpoint
                and request.blueprint != 'auth'
                and request.endpoint != 'static'):
            return redirect(url_for('auth.unconfirmed'))


@auth.route('/login', methods=['GET', 'POST'])
def login():
    if current_user.is_authenticated:
        return redirect(url_for('main.dashboard'))
    form = LoginForm(prefix='login-form')
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.user.data).first()
        if user is None:
            user = User.query.filter_by(email=form.user.data.lower()).first()
        if user is not None and user.verify_password(form.password.data):
            login_user(user, form.remember_me.data)
            next = request.args.get('next')
            if next is None or not next.startswith('/'):
                next = url_for('main.dashboard')
            return redirect(next)
        flash('Invalid email/username or password.')
    return render_template('auth/login.html.j2', form=form, title='Log in')


@auth.route('/logout')
@login_required
def logout():
    logout_user()
    flash('You have been logged out.')
    return redirect(url_for('main.index'))


@auth.route('/register', methods=['GET', 'POST'])
def register():
    if current_user.is_authenticated:
        return redirect(url_for('main.dashboard'))
    form = RegistrationForm(prefix='registration-form')
    if form.validate_on_submit():
        user = User(email=form.email.data.lower(),
                    password=form.password.data,
                    username=form.username.data)
        db.session.add(user)
        db.session.commit()
        try:
            os.makedirs(user.path)
        except OSError:
            logging.error('Make dir {} led to an OSError!'.format(user.path))
            db.session.delete(user)
            db.session.commit()
            abort(500)
        else:
            token = user.generate_confirmation_token()
            msg = create_message(user.email, 'Confirm Your Account',
                                 'auth/email/confirm', token=token, user=user)
            send(msg)
            flash('A confirmation email has been sent to you.')
            return redirect(url_for('.login'))
    return render_template('auth/register.html.j2', form=form,
                           title='Register')


@auth.route('/confirm/<token>')
@login_required
def confirm(token):
    if current_user.confirmed:
        return redirect(url_for('main.dashboard'))
    if current_user.confirm(token):
        db.session.commit()
        flash('You have confirmed your account. Thanks!')
        return redirect(url_for('main.dashboard'))
    else:
        flash('The confirmation link is invalid or has expired.')
        return redirect(url_for('.unconfirmed'))


@auth.route('/unconfirmed')
def unconfirmed():
    if current_user.is_anonymous:
        return redirect(url_for('main.index'))
    elif current_user.confirmed:
        return redirect(url_for('main.dashboard'))
    return render_template('auth/unconfirmed.html.j2', title='Unconfirmed')


@auth.route('/confirm')
@login_required
def resend_confirmation():
    token = current_user.generate_confirmation_token()
    msg = create_message(current_user.email, 'Confirm Your Account',
                         'auth/email/confirm', token=token, user=current_user)
    send(msg)
    flash('A new confirmation email has been sent to you.')
    return redirect(url_for('auth.unconfirmed'))


@auth.route('/reset', methods=['GET', 'POST'])
def reset_password_request():
    if current_user.is_authenticated:
        return redirect(url_for('main.dashboard'))
    form = ResetPasswordRequestForm(prefix='reset-password-request-form')
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data.lower()).first()
        if user is not None:
            token = user.generate_reset_token()
            msg = create_message(user.email, 'Reset Your Password',
                                 'auth/email/reset_password', token=token,
                                 user=user)
            send(msg)
        flash('An email with instructions to reset your password has been sent to you.')  # noqa
        return redirect(url_for('.login'))
    return render_template('auth/reset_password_request.html.j2', form=form,
                           title='Password Reset')


@auth.route('/reset/<token>', methods=['GET', 'POST'])
def reset_password(token):
    if current_user.is_authenticated:
        return redirect(url_for('main.dashboard'))
    form = ResetPasswordForm(prefix='reset-password-form')
    if form.validate_on_submit():
        if User.reset_password(token, form.password.data):
            db.session.commit()
            flash('Your password has been updated.')
            return redirect(url_for('.login'))
        else:
            return redirect(url_for('main.index'))
    return render_template('auth/reset_password.html.j2', form=form,
                           title='Password Reset', token=token)
app/corpora/__init__.py (Normal file, 5 additions)
@@ -0,0 +1,5 @@
from flask import Blueprint


corpora = Blueprint('corpora', __name__)
from . import events, views  # noqa
app/corpora/events.py (Normal file, 307 additions)
@@ -0,0 +1,307 @@
from datetime import datetime
from flask import current_app, request
from flask_login import current_user
from socket import gaierror
from .. import db, socketio
from ..decorators import socketio_login_required
from ..events import socketio_sessions
from ..models import Corpus
import cqi
import math
import os
import shutil


'''
' A dictionary mapping corpus ids to lists of associated Socket.IO session
' ids (sid). {<corpus_id>: [<sid>, ...], ...}
'''
corpus_analysis_sessions = {}
'''
' A dictionary containing Socket.IO session id - CQi client pairs.
' {<sid>: CQiClient, ...}
'''
corpus_analysis_clients = {}


@socketio.on('corpus_analysis_init')
@socketio_login_required
def init_corpus_analysis(corpus_id):
    corpus = Corpus.query.get(corpus_id)
    if corpus is None:
        response = {'code': 404, 'desc': None, 'msg': 'Not Found'}
        socketio.emit('corpus_analysis_init', response, room=request.sid)
        return
    if not (corpus.creator == current_user or current_user.is_administrator()):  # noqa
        response = {'code': 403, 'desc': None, 'msg': 'Forbidden'}
        socketio.emit('corpus_analysis_init', response, room=request.sid)
        return
    if corpus.status not in ['prepared', 'start analysis', 'analysing']:
        response = {'code': 424, 'desc': 'Corpus status is not "prepared", "start analysis" or "analysing"', 'msg': 'Failed Dependency'}  # noqa
        socketio.emit('corpus_analysis_init', response, room=request.sid)
        return
    if corpus.status == 'prepared':
        corpus.status = 'start analysis'
        db.session.commit()
        event = 'user_{}_patch'.format(current_user.id)
        jsonpatch = [{'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}]  # noqa
        room = 'user_{}'.format(corpus.user_id)
        socketio.emit(event, jsonpatch, room=room)
    socketio.start_background_task(corpus_analysis_session_handler,
                                   current_app._get_current_object(),
                                   corpus_id, current_user.id, request.sid)


def corpus_analysis_session_handler(app, corpus_id, user_id, session_id):
    with app.app_context():
        ''' Setup analysis session '''
        corpus = Corpus.query.get(corpus_id)
        retry_counter = 15
        while corpus.status != 'analysing':
            db.session.refresh(corpus)
            retry_counter -= 1
            if retry_counter == 0:
                response = {'code': 408, 'desc': 'Corpus analysis session took too long to start', 'msg': 'Request Timeout'}  # noqa
                socketio.emit('corpus_analysis_init', response, room=request.sid)  # noqa
            socketio.sleep(3)
        client = cqi.CQiClient('cqpserver_{}'.format(corpus_id))
        try:
            connect_status = client.connect()
            payload = {'code': connect_status, 'msg': cqi.api.specification.lookup[connect_status]}  # noqa
        except cqi.errors.CQiException as e:
            handle_cqi_exception('corpus_analysis_init', e, session_id)
            return
        except gaierror:
            response = {'code': 500, 'desc': None, 'msg': 'Internal Server Error'}  # noqa
            socketio.emit('corpus_analysis_init', response, room=session_id)
            return
        corpus_analysis_clients[session_id] = client
        if corpus_id in corpus_analysis_sessions:
            corpus_analysis_sessions[corpus_id].append(session_id)
        else:
            corpus_analysis_sessions[corpus_id] = [session_id]
        client.status = 'ready'
        response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload}
        socketio.emit('corpus_analysis_init', response, room=session_id)
        ''' Observe analysis session '''
        while session_id in socketio_sessions:
            socketio.sleep(3)
        ''' Teardown analysis session '''
        if client.status == 'running':
            client.status = 'abort'
            while client.status != 'ready':
                socketio.sleep(0.1)
        try:
            client.disconnect()
        except cqi.errors.CQiException:
            pass
        corpus_analysis_clients.pop(session_id, None)
        corpus_analysis_sessions[corpus_id].remove(session_id)
        if not corpus_analysis_sessions[corpus_id]:
            corpus_analysis_sessions.pop(corpus_id, None)
            corpus.status = 'stop analysis'
            db.session.commit()
            event = 'user_{}_patch'.format(corpus.user_id)
            jsonpatch = [{'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}]  # noqa
            room = 'user_{}'.format(corpus.user_id)
            socketio.emit(event, jsonpatch, room=room)


@socketio.on('corpus_analysis_meta_data')
@socketio_login_required
def corpus_analysis_get_meta_data(corpus_id):
    # get meta data from db
    db_corpus = Corpus.query.get(corpus_id)
    metadata = {}
    metadata['corpus_name'] = db_corpus.title
    metadata['corpus_description'] = db_corpus.description
    metadata['corpus_creation_date'] = db_corpus.creation_date.isoformat()
    metadata['corpus_last_edited_date'] = \
        db_corpus.last_edited_date.isoformat()
    client = corpus_analysis_clients.get(request.sid)
    if client is None:
        response = {'code': 424, 'desc': 'No client found for this session',
                    'msg': 'Failed Dependency'}
        socketio.emit('corpus_analysis_meta_data', response, room=request.sid)
        return
    # check if client is busy or not
    if client.status == 'running':
        client.status = 'abort'
        while client.status != 'ready':
            socketio.sleep(0.1)
    # get meta data from corpus in cqp server
    client.status = 'running'
    try:
        client_corpus = client.corpora.get('CORPUS')
        metadata['corpus_properties'] = client_corpus.attrs['properties']
        metadata['corpus_size_tokens'] = client_corpus.attrs['size']

        text_attr = client_corpus.structural_attributes.get('text')
        struct_attrs = client_corpus.structural_attributes.list(
            filters={'part_of': text_attr})
        text_ids = range(0, (text_attr.attrs['size']))
        texts_metadata = {}
        for text_id in text_ids:
            texts_metadata[text_id] = {}
            for struct_attr in struct_attrs:
                texts_metadata[text_id][struct_attr.attrs['name'][(len(text_attr.attrs['name']) + 1):]] = struct_attr.values_by_ids(list(range(struct_attr.attrs['size'])))[text_id]  # noqa
        metadata['corpus_all_texts'] = texts_metadata
        metadata['corpus_analysis_date'] = datetime.utcnow().isoformat()
        metadata['corpus_cqi_py_protocol_version'] = client.api.version
        metadata['corpus_cqi_py_package_version'] = cqi.__version__
        # TODO: make this dynamic
        metadata['corpus_cqpserver_version'] = 'CQPserver v3.4.22'

        # write some metadata to the db
        db_corpus.current_nr_of_tokens = metadata['corpus_size_tokens']
        db.session.commit()
        event = 'user_{}_patch'.format(db_corpus.user_id)
        jsonpatch = [{'op': 'replace', 'path': '/corpora/{}/current_nr_of_tokens'.format(db_corpus.id), 'value': db_corpus.current_nr_of_tokens}]  # noqa
        room = 'user_{}'.format(db_corpus.user_id)
        socketio.emit(event, jsonpatch, room=room)

        # emit data
        payload = metadata
        response = {'code': 200, 'desc': 'Corpus meta data', 'msg': 'OK',
                    'payload': payload}
        socketio.emit('corpus_analysis_meta_data', response, room=request.sid)
    except cqi.errors.CQiException as e:
        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
        response = {'code': 500, 'desc': None, 'msg': 'Internal Server Error',
                    'payload': payload}
        socketio.emit('corpus_analysis_meta_data', response, room=request.sid)
    client.status = 'ready'


@socketio.on('corpus_analysis_query')
@socketio_login_required
def corpus_analysis_query(query):
    client = corpus_analysis_clients.get(request.sid)
    if client is None:
        response = {'code': 424, 'desc': 'No client found for this session',
                    'msg': 'Failed Dependency'}
        socketio.emit('corpus_analysis_query', response, room=request.sid)
        return
    if client.status == 'running':
        client.status = 'abort'
        while client.status != 'ready':
            socketio.sleep(0.1)
    client.status = 'running'
    try:
        corpus = client.corpora.get('CORPUS')
        query_status = corpus.query(query)
        results = corpus.subcorpora.get('Results')
    except cqi.errors.CQiException as e:
        client.status = 'ready'
        handle_cqi_exception('corpus_analysis_query', e, request.sid)
        return
    payload = {'status': query_status,
               'msg': cqi.api.specification.lookup[query_status],
               'match_count': results.attrs['size']}
    response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload}
    socketio.emit('corpus_analysis_query', response, room=request.sid)
    chunk_size = 100
    chunk_start = 0
    context = 50
    progress = 0
    while chunk_start <= results.attrs['size']:
        if client.status == 'abort':
            break
        try:
            chunk = results.export(context=context, cutoff=chunk_size, offset=chunk_start)  # noqa
        except cqi.errors.CQiException as e:
            handle_cqi_exception('corpus_analysis_query', e, request.sid)
            break
        if (results.attrs['size'] == 0):
            progress = 100
        else:
            progress = ((chunk_start + chunk_size) / results.attrs['size']) * 100  # noqa
        progress = min(100, int(math.ceil(progress)))
        payload = {'chunk': chunk, 'progress': progress}
        response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload}
        socketio.emit('corpus_analysis_query_results', response, room=request.sid)  # noqa
        chunk_start += chunk_size
    client.status = 'ready'


@socketio.on('corpus_analysis_get_match_with_full_context')
@socketio_login_required
def corpus_analysis_get_match_with_full_context(payload):
    type = payload['type']
    data_indexes = payload['data_indexes']
    first_cpos = payload['first_cpos']
    last_cpos = payload['last_cpos']
    client = corpus_analysis_clients.get(request.sid)
    if client is None:
        response = {'code': 424, 'desc': 'No client found for this session',
                    'msg': 'Failed Dependency'}
        socketio.emit('corpus_analysis_get_match_with_full_context', response,
                      room=request.sid)
        return
    if client.status == 'running':
        client.status = 'abort'
        while client.status != 'ready':
            socketio.sleep(0.1)
    client.status = 'running'
    try:
        corpus = client.corpora.get('CORPUS')
        s = corpus.structural_attributes.get('s')
    except cqi.errors.CQiException as e:
        handle_cqi_exception('corpus_analysis_get_match_with_full_context', e, request.sid)  # noqa
        return
    i = 0
    # Send data one match at a time.
    for index, f_cpos, l_cpos in zip(data_indexes, first_cpos, last_cpos):
        i += 1
        matches = []
        cpos_lookup = text_lookup = {}
        try:
            tmp = s.export(f_cpos, l_cpos, context=10)
        except cqi.errors.CQiException as e:
            handle_cqi_exception('corpus_analysis_get_match_with_full_context', e, request.sid)  # noqa
            break
        matches.append(tmp['matches'][0])
        cpos_lookup.update(tmp['cpos_lookup'])
        text_lookup.update(tmp['text_lookup'])
        progress = i / len(data_indexes) * 100
        payload = {'matches': matches, 'progress': progress,
                   'cpos_lookup': cpos_lookup, 'text_lookup': text_lookup}
        response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload,
                    'type': type, 'data_indexes': data_indexes}
        socketio.emit('corpus_analysis_get_match_with_full_context',
                      response, room=request.sid)
    client.status = 'ready'


@socketio.on('export_corpus')
@socketio_login_required
def export_corpus(corpus_id):
    corpus = Corpus.query.get(corpus_id)
    if corpus is None:
        response = {'code': 404, 'msg': 'Not found'}
        socketio.emit('export_corpus', response, room=request.sid)
        return
    if corpus.status != 'prepared':
        response = {'code': 412, 'msg': 'Precondition Failed'}
        socketio.emit('export_corpus', response, room=request.sid)
        return
    # delete old corpus archive if it exists/has been built before
    if corpus.archive_file is not None and os.path.isfile(corpus.archive_file):
        os.remove(corpus.archive_file)
    zip_name = corpus.title
    zip_path = os.path.join(current_user.path, 'corpora', zip_name)
    corpus.archive_file = os.path.join(corpus.path, zip_name) + '.zip'
    db.session.commit()
    shutil.make_archive(zip_path, 'zip', corpus.path)
    shutil.move(zip_path + '.zip', corpus.archive_file)
    socketio.emit('export_corpus_' + str(corpus.id), room=request.sid)


def handle_cqi_exception(event, exception, room):
    response = {'code': 500,
                'desc': None,
                'msg': 'Internal Server Error',
                'payload': {'code': exception.code,
                            'desc': exception.description,
                            'msg': exception.name}}
    socketio.emit(event, response, room=room)
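
The handlers above answer on the same event name they were called with and wrap their results in a {'code', 'desc', 'msg', 'payload'} envelope, addressed to the caller's Socket.IO session. A minimal sketch of driving this flow from Python with flask-socketio's test client follows; the 'testing' config key, a previously logged-in Flask test client and an existing, prepared corpus with id 1 are all assumptions, since socketio_login_required rejects anonymous sessions.

# Sketch only; assumes a 'testing' config, an authenticated flask_client and corpus id 1.
from app import create_app, socketio

app = create_app('testing')
flask_client = app.test_client()  # assumed to already carry a login session cookie
client = socketio.test_client(app, flask_test_client=flask_client)

client.emit('corpus_analysis_init', 1)
for message in client.get_received():
    # Each received message carries one response envelope as its single argument.
    envelope = message['args'][0]
    print(message['name'], envelope['code'], envelope['msg'])
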
app/corpora/events2.py (Normal file, 363 additions)
@@ -0,0 +1,363 @@
from datetime import datetime
from flask import current_app, request
from flask_login import current_user
from socket import gaierror
from .. import db, socketio
from ..decorators import socketio_login_required
from ..events import socketio_sessions
from ..models import Corpus, User
import cqi
import math
import os
import shutil
import logging


'''
' A dictionary mapping corpus ids to lists of associated Socket.IO session
' ids (sid). {<corpus_id>: [<sid>, ...], ...}
'''
corpus_analysis_sessions = {}
'''
' A dictionary containing Socket.IO session id - CQi client pairs.
' {<sid>: CQiClient, ...}
'''
corpus_analysis_clients = {}


@socketio.on('corpus_analysis_init')
@socketio_login_required
def init_corpus_analysis(corpus_id):
    corpus = Corpus.query.get(corpus_id)
    if corpus is None:
        response = {'code': 404, 'desc': None, 'msg': 'Not Found'}
        socketio.emit('corpus_analysis_init', response, room=request.sid)
        return
    if not (corpus.creator == current_user or current_user.is_administrator()):  # noqa
        response = {'code': 403, 'desc': None, 'msg': 'Forbidden'}
        socketio.emit('corpus_analysis_init', response, room=request.sid)
        return
    if corpus.status not in ['prepared', 'start analysis', 'analysing']:
        response = {'code': 424, 'desc': 'Corpus status is not "prepared", "start analysis" or "analysing"', 'msg': 'Failed Dependency'}  # noqa
        socketio.emit('corpus_analysis_init', response, room=request.sid)
        return
    if corpus.status == 'prepared':
        corpus.status = 'start analysis'
        db.session.commit()
        event = 'user_{}_patch'.format(current_user.id)
        jsonpatch = [{'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}]  # noqa
        room = 'user_{}'.format(corpus.user_id)
        socketio.emit(event, jsonpatch, room=room)
    socketio.start_background_task(corpus_analysis_session_handler,
                                   current_app._get_current_object(),
                                   corpus_id, current_user.id, request.sid)


def corpus_analysis_session_handler(app, corpus_id, user_id, session_id):
    with app.app_context():
        ''' Setup analysis session '''
        corpus = Corpus.query.get(corpus_id)
        retry_counter = 15
        while corpus.status != 'analysing':
            db.session.refresh(corpus)
            retry_counter -= 1
            if retry_counter == 0:
                response = {'code': 408, 'desc': 'Corpus analysis session took too long to start', 'msg': 'Request Timeout'}  # noqa
                socketio.emit('corpus_analysis_init', response, room=request.sid)  # noqa
            socketio.sleep(3)
        client = cqi.CQiClient('cqpserver_{}'.format(corpus_id))
        try:
            connect_status = client.connect()
            payload = {'code': connect_status, 'msg': cqi.api.specification.lookup[connect_status]}  # noqa
        except cqi.errors.CQiException as e:
            payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
            response = {'code': 500, 'desc': None,
                        'msg': 'Internal Server Error', 'payload': payload}
            socketio.emit('corpus_analysis_init', response, room=session_id)
            return
        except gaierror:
            response = {'code': 500, 'desc': None,
                        'msg': 'Internal Server Error'}
            socketio.emit('corpus_analysis_init', response, room=session_id)
            return
        corpus_analysis_clients[session_id] = client
        if corpus_id in corpus_analysis_sessions:
            corpus_analysis_sessions[corpus_id].append(session_id)
        else:
            corpus_analysis_sessions[corpus_id] = [session_id]
        client.status = 'ready'
        response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload}
        socketio.emit('corpus_analysis_init', response, room=session_id)
        ''' Observe analysis session '''
        while session_id in socketio_sessions:
            socketio.sleep(3)
        ''' Teardown analysis session '''
        if client.status == 'running':
            client.status = 'abort'
            while client.status != 'ready':
                socketio.sleep(0.1)
        try:
            client.disconnect()
        except cqi.errors.CQiException:
            pass
        corpus_analysis_clients.pop(session_id, None)
        corpus_analysis_sessions[corpus_id].remove(session_id)
        if not corpus_analysis_sessions[corpus_id]:
            corpus_analysis_sessions.pop(corpus_id, None)
            corpus.status = 'stop analysis'
            db.session.commit()
            event = 'user_{}_patch'.format(corpus.user_id)
            jsonpatch = [{'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}]  # noqa
            room = 'user_{}'.format(corpus.user_id)
            socketio.emit(event, jsonpatch, room=room)


@socketio.on('corpus_analysis_meta_data')
@socketio_login_required
def corpus_analysis_get_meta_data(corpus_id):
    # get meta data from db
    db_corpus = Corpus.query.get(corpus_id)
    metadata = {}
    metadata['corpus_name'] = db_corpus.title
    metadata['corpus_description'] = db_corpus.description
    metadata['corpus_creation_date'] = db_corpus.creation_date.isoformat()
    metadata['corpus_last_edited_date'] = \
        db_corpus.last_edited_date.isoformat()
    client = corpus_analysis_clients.get(request.sid)
    if client is None:
        response = {'code': 424, 'desc': 'No client found for this session',
                    'msg': 'Failed Dependency'}
        socketio.emit('corpus_analysis_meta_data', response, room=request.sid)
        return
    # check if client is busy or not
    if client.status == 'running':
        client.status = 'abort'
        while client.status != 'ready':
            socketio.sleep(0.1)
    # get meta data from corpus in cqp server
    client.status = 'running'
    try:
        client_corpus = client.corpora.get('CORPUS')
        metadata['corpus_properties'] = client_corpus.attrs['properties']
        metadata['corpus_size_tokens'] = client_corpus.attrs['size']

        text_attr = client_corpus.structural_attributes.get('text')
        struct_attrs = client_corpus.structural_attributes.list(
            filters={'part_of': text_attr})
        text_ids = range(0, (text_attr.attrs['size']))
        texts_metadata = {}
        for text_id in text_ids:
            texts_metadata[text_id] = {}
            for struct_attr in struct_attrs:
                texts_metadata[text_id][struct_attr.attrs['name'][(len(text_attr.attrs['name']) + 1):]] = struct_attr.values_by_ids(list(range(struct_attr.attrs['size'])))[text_id]  # noqa
        metadata['corpus_all_texts'] = texts_metadata
        metadata['corpus_analysis_date'] = datetime.utcnow().isoformat()
        metadata['corpus_cqi_py_protocol_version'] = client.api.version
        metadata['corpus_cqi_py_package_version'] = cqi.__version__
        # TODO: make this dynamic
        metadata['corpus_cqpserver_version'] = 'CQPserver v3.4.22'

        # write some metadata to the db
        db_corpus.current_nr_of_tokens = metadata['corpus_size_tokens']
        db.session.commit()
        event = 'user_{}_patch'.format(db_corpus.user_id)
        jsonpatch = [{'op': 'replace', 'path': '/corpora/{}/current_nr_of_tokens'.format(db_corpus.id), 'value': db_corpus.current_nr_of_tokens}]  # noqa
        room = 'user_{}'.format(db_corpus.user_id)
        socketio.emit(event, jsonpatch, room=room)

        # emit data
        payload = metadata
        response = {'code': 200, 'desc': 'Corpus meta data', 'msg': 'OK',
                    'payload': payload}
        socketio.emit('corpus_analysis_meta_data', response, room=request.sid)
    except cqi.errors.CQiException as e:
        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
        response = {'code': 500, 'desc': None, 'msg': 'Internal Server Error',
                    'payload': payload}
        socketio.emit('corpus_analysis_meta_data', response, room=request.sid)
    client.status = 'ready'


@socketio.on('corpus_analysis_query')
@socketio_login_required
def corpus_analysis_query(query):
    client = corpus_analysis_clients.get(request.sid)
    if client is None:
        response = {'code': 424, 'desc': 'No client found for this session',
                    'msg': 'Failed Dependency'}
        socketio.emit('corpus_analysis_query', response, room=request.sid)
        return
    if client.status == 'running':
        client.status = 'abort'
        while client.status != 'ready':
            socketio.sleep(0.1)
    client.status = 'running'
    try:
        corpus = client.corpora.get('CORPUS')
        query_status = corpus.query(query)
        results = corpus.subcorpora.get('Results')
    except cqi.errors.CQiException as e:
        client.status = 'ready'
        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
        response = {'code': 500, 'desc': None, 'msg': 'Internal Server Error',
                    'payload': payload}
        socketio.emit('corpus_analysis_query', response, room=request.sid)
        return
    payload = {'status': query_status,
               'msg': cqi.api.specification.lookup[query_status],
               'match_count': results.attrs['size']}
    response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload}
    socketio.emit('corpus_analysis_query', response, room=request.sid)
    chunk_size = 100
    chunk_start = 0
    context = 50
    progress = 0
    while chunk_start <= results.attrs['size']:
        if client.status == 'abort':
            break
        try:
            chunk = results.export(context=context, cutoff=chunk_size, offset=chunk_start)  # noqa
        except cqi.errors.CQiException as e:
            client.status = 'ready'
            payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
            response = {'code': 500, 'desc': None,
                        'msg': 'Internal Server Error', 'payload': payload}
            socketio.emit('corpus_analysis_query', response, room=request.sid)
            return
        if (results.attrs['size'] == 0):
            progress = 100
        else:
            progress = ((chunk_start + chunk_size) / results.attrs['size']) * 100  # noqa
        progress = min(100, int(math.ceil(progress)))
        response = {'code': 200, 'desc': None, 'msg': 'OK',
                    'payload': {'chunk': chunk, 'progress': progress}}
        socketio.emit('corpus_analysis_query_results', response,
                      room=request.sid)
        chunk_start += chunk_size
    client.status = 'ready'


@socketio.on('corpus_analysis_inspect_match')
@socketio_login_required
def corpus_analysis_inspect_match(payload):
    client = corpus_analysis_clients.get(request.sid)
    if client is None:
        response = {'code': 424, 'desc': 'No client found for this session',
                    'msg': 'Failed Dependency'}
        socketio.emit('corpus_analysis_inspect_match', response, room=request.sid)  # noqa
        return
    match_id = payload['match_id']
    if client.status == 'running':
        client.status = 'abort'
        while client.status != 'ready':
            socketio.sleep(0.1)
    client.status = 'running'
    try:
        corpus = client.corpora.get('CORPUS')
        results = corpus.subcorpora.get('Results')
    except cqi.errors.CQiException as e:
        client.status = 'ready'
        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
        response = {'code': 500, 'desc': None, 'msg': 'Internal Server Error',
                    'payload': payload}
        socketio.emit('corpus_analysis_inspect_match', response, room=request.sid)  # noqa
        return
    context = 200
    try:
        payload = results.export(context=context, cutoff=1, offset=match_id)
    except cqi.errors.CQiException as e:
        client.status = 'ready'
        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
        response = {'code': 500, 'desc': None, 'msg': 'Internal Server Error',
                    'payload': payload}
        socketio.emit('corpus_analysis_inspect_match', response, room=request.sid)  # noqa
        return
    response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload}
    socketio.emit('corpus_analysis_inspect_match', response, room=request.sid)
    client.status = 'ready'



@socketio.on('corpus_analysis_get_match_with_full_context')
@socketio_login_required
def corpus_analysis_get_match_with_full_context(payload):
    type = payload['type']
    data_indexes = payload['data_indexes']
    first_cpos = payload['first_cpos']
    last_cpos = payload['last_cpos']
    client = corpus_analysis_clients.get(request.sid)
    if client is None:
        response = {'code': 424, 'desc': 'No client found for this session',
                    'msg': 'Failed Dependency'}
        socketio.emit('corpus_analysis_get_match_with_full_context', response,
                      room=request.sid)
        return
    if client.status == 'running':
        client.status = 'abort'
        while client.status != 'ready':
            socketio.sleep(0.1)
    client.status = 'running'
    try:
        corpus = client.corpora.get('CORPUS')
        s = corpus.structural_attributes.get('s')
        payload = {}
        payload['matches'] = []
        payload['cpos_lookup'] = {}
        payload['text_lookup'] = {}
        payload['progress'] = 0
        i = 0
        # Send data one match at a time.
        for index, f_cpos, l_cpos in zip(data_indexes, first_cpos, last_cpos):
            i += 1
            tmp_match = s.export(f_cpos, l_cpos, context=10)
            payload['matches'].append(tmp_match['matches'][0])
            payload['cpos_lookup'].update(tmp_match['cpos_lookup'])
            payload['text_lookup'].update(tmp_match['text_lookup'])
            payload['progress'] = i/len(data_indexes)*100
            response = {'code': 200,
                        'desc': None,
                        'msg': 'OK',
                        'payload': payload,
                        'type': type,
                        'data_indexes': data_indexes}
            socketio.emit('corpus_analysis_get_match_with_full_context',
                          response, room=request.sid)
            payload['matches'] = []
            payload['cpos_lookup'] = {}
            payload['text_lookup'] = {}
    except cqi.errors.CQiException as e:
        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
        response = {'code': 500,
                    'desc': None,
                    'msg': 'Internal Server Error',
                    'payload': payload,
                    'type': type,
                    'data_indexes': data_indexes}
        socketio.emit('corpus_analysis_get_match_with_full_context',
                      response,
                      room=request.sid)
    client.status = 'ready'


@socketio.on('export_corpus')
@socketio_login_required
def export_corpus(corpus_id):
    corpus = Corpus.query.get(corpus_id)
    if corpus is None:
        response = {'code': 404, 'msg': 'Not found'}
        socketio.emit('export_corpus', response, room=request.sid)
        return
    if corpus.status != 'prepared':
        response = {'code': 412, 'msg': 'Precondition Failed'}
        socketio.emit('export_corpus', response, room=request.sid)
        return
    # delete old corpus archive if it exists/has been built before
    if corpus.archive_file is not None and os.path.isfile(corpus.archive_file):
        os.remove(corpus.archive_file)
    zip_name = corpus.title
    zip_path = os.path.join(current_user.path, 'corpora', zip_name)
    corpus.archive_file = os.path.join(corpus.path, zip_name) + '.zip'
    db.session.commit()
    shutil.make_archive(zip_path, 'zip', corpus.path)
    shutil.move(zip_path + '.zip', corpus.archive_file)
    socketio.emit('export_corpus_' + str(corpus.id), room=request.sid)
app/corpora/forms.py (Normal file, 166 additions)
@@ -0,0 +1,166 @@
from flask_wtf import FlaskForm
from werkzeug.utils import secure_filename
from wtforms import (BooleanField, FileField, StringField, SubmitField,
                     ValidationError, IntegerField, SelectField)
from wtforms.validators import DataRequired, Length, NumberRange


class AddCorpusFileForm(FlaskForm):
    '''
    Form to add a .vrt corpus file to the current corpus.
    '''
    # Required fields
    author = StringField('Author', validators=[DataRequired(), Length(1, 255)])
    file = FileField('File', validators=[DataRequired()])
    publishing_year = IntegerField('Publishing year',
                                   validators=[DataRequired()])
    title = StringField('Title', validators=[DataRequired(), Length(1, 255)])
    # Optional fields
    address = StringField('Address', validators=[Length(0, 255)])
    booktitle = StringField('Booktitle', validators=[Length(0, 255)])
    chapter = StringField('Chapter', validators=[Length(0, 255)])
    editor = StringField('Editor', validators=[Length(0, 255)])
    institution = StringField('Institution', validators=[Length(0, 255)])
    journal = StringField('Journal', validators=[Length(0, 255)])
    pages = StringField('Pages', validators=[Length(0, 255)])
    publisher = StringField('Publisher', validators=[Length(0, 255)])
    school = StringField('School', validators=[Length(0, 255)])
    submit = SubmitField()

    def __init__(self, corpus, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.corpus = corpus

    def validate_file(self, field):
        if not field.data.filename.lower().endswith('.vrt'):
            raise ValidationError('File does not have an approved extension: '
                                  '.vrt')
        field.data.filename = secure_filename(field.data.filename)
        for corpus_file in self.corpus.files:
            if field.data.filename == corpus_file.filename:
                raise ValidationError('File already registered to corpus.')


class EditCorpusFileForm(FlaskForm):
    '''
    Form to edit the meta data of one corpus file.
    '''
    # Required fields
    author = StringField('Author', validators=[DataRequired(), Length(1, 255)])
    publishing_year = IntegerField('Publishing year',
                                   validators=[DataRequired()])
    title = StringField('Title', validators=[DataRequired(), Length(1, 255)])
    # Optional fields
    address = StringField('Address', validators=[Length(0, 255)])
    booktitle = StringField('Booktitle', validators=[Length(0, 255)])
    chapter = StringField('Chapter', validators=[Length(0, 255)])
    editor = StringField('Editor', validators=[Length(0, 255)])
    institution = StringField('Institution', validators=[Length(0, 255)])
    journal = StringField('Journal', validators=[Length(0, 255)])
    pages = StringField('Pages', validators=[Length(0, 255)])
    publisher = StringField('Publisher', validators=[Length(0, 255)])
    school = StringField('School', validators=[Length(0, 255)])
    submit = SubmitField()


class AddCorpusForm(FlaskForm):
    '''
    Form to add a new corpus.
    '''
    description = StringField('Description',
                              validators=[DataRequired(), Length(1, 255)])
    submit = SubmitField()
    title = StringField('Title', validators=[DataRequired(), Length(1, 32)])


class ImportCorpusForm(FlaskForm):
    '''
    Form to import a corpus.
    '''
    description = StringField('Description',
                              validators=[DataRequired(), Length(1, 255)])
    file = FileField('File', validators=[DataRequired()])
    submit = SubmitField()
    title = StringField('Title', validators=[DataRequired(), Length(1, 32)])

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def validate_file(self, field):
        if not field.data.filename.lower().endswith('.zip'):
            raise ValidationError('File does not have an approved extension: '
                                  '.zip')
        field.data.filename = secure_filename(field.data.filename)


class QueryForm(FlaskForm):
    '''
    Form to submit a query to the server, which is executed via cqi-py.
    '''
    query = StringField('Query',
                        validators=[DataRequired(), Length(1, 1024)])
    submit = SubmitField('Search')


class DisplayOptionsForm(FlaskForm):
    '''
    Form that lets the user alter how the matches are displayed.
    '''
    expert_mode = BooleanField('Expert mode')
    result_context = SelectField('Result context',
                                 choices=[('', 'Choose your option'),
                                          ('10', '10'),
                                          ('20', '20'),
                                          ('30', '30'),
                                          ('40', '40'),
                                          ('50', '50')])
    results_per_page = SelectField('Results per page',
                                   choices=[('', 'Choose your option'),
                                            ('10', '10'),
                                            ('20', '20'),
                                            ('30', '30'),
                                            ('40', '40'),
                                            ('50', '50')])


class InspectDisplayOptionsForm(FlaskForm):
    '''
    Form for the inspect modal that lets the user adjust how the current
    match is displayed.
    '''
    expert_mode_inspect = BooleanField('Expert mode')
    highlight_sentences = BooleanField('Split sentences')
    context_sentences = IntegerField('Context sentences',
                                     validators=[NumberRange(min=0, max=10)],
                                     default=3)


class QueryDownloadForm(FlaskForm):
    '''
    Form to choose the file format in which the analysis results are
    downloaded. WIP.
    '''
    file_type = SelectField('File type',
                            choices=[('', 'Choose file type'),
                                     ('csv', 'csv'),
                                     ('json', 'json'),
                                     ('excel', 'excel'),
                                     ('html', 'html-table')],
                            validators=[DataRequired()])


class AddQueryResultForm(FlaskForm):
    '''
    Form used to import one query result JSON file.
    '''
    description = StringField('Description',
                              validators=[DataRequired(), Length(1, 255)])
    file = FileField('File', validators=[DataRequired()])
    title = StringField('Title', validators=[DataRequired(), Length(1, 32)])
    submit = SubmitField()

    def validate_file(self, field):
        if not field.data.filename.lower().endswith('.json'):
            raise ValidationError('File does not have an approved extension: '
                                  '.json')
        field.data.filename = secure_filename(field.data.filename)
app/corpora/import_corpus.py (Normal file, 89 additions)
@@ -0,0 +1,89 @@
check_zip_contents = ['data/',
                      'merged/',
                      'registry/',
                      'registry/corpus',
                      'data/corpus/',
                      'data/corpus/text_editor.avs',
                      'data/corpus/pos.lexicon',
                      'data/corpus/simple_pos.huf',
                      'data/corpus/word.huf',
                      'data/corpus/text_booktitle.avs',
                      'data/corpus/word.lexicon.srt',
                      'data/corpus/word.lexicon.idx',
                      'data/corpus/simple_pos.crx',
                      'data/corpus/text_pages.rng',
                      'data/corpus/simple_pos.crc',
                      'data/corpus/ner.lexicon',
                      'data/corpus/lemma.huf',
                      'data/corpus/text_title.rng',
                      'data/corpus/text_chapter.avx',
                      'data/corpus/lemma.lexicon.srt',
                      'data/corpus/lemma.lexicon.idx',
                      'data/corpus/text_school.rng',
                      'data/corpus/text_journal.avs',
                      'data/corpus/simple_pos.lexicon',
                      'data/corpus/pos.huf',
                      'data/corpus/text_editor.avx',
                      'data/corpus/lemma.crc',
                      'data/corpus/lemma.lexicon',
                      'data/corpus/pos.hcd',
                      'data/corpus/text_title.avx',
                      'data/corpus/text_institution.avs',
                      'data/corpus/text_address.avx',
                      'data/corpus/lemma.corpus.cnt',
                      'data/corpus/word.crx',
                      'data/corpus/simple_pos.hcd',
                      'data/corpus/simple_pos.huf.syn',
                      'data/corpus/simple_pos.lexicon.srt',
                      'data/corpus/text_author.avx',
                      'data/corpus/text_publisher.avs',
                      'data/corpus/text_chapter.avs',
                      'data/corpus/ner.corpus.cnt',
                      'data/corpus/pos.huf.syn',
                      'data/corpus/text_booktitle.rng',
                      'data/corpus/lemma.huf.syn',
                      'data/corpus/pos.corpus.cnt',
                      'data/corpus/word.lexicon',
                      'data/corpus/text_publishing_year.avs',
                      'data/corpus/lemma.hcd',
                      'data/corpus/text_school.avs',
                      'data/corpus/text_journal.rng',
                      'data/corpus/word.corpus.cnt',
                      'data/corpus/text_school.avx',
                      'data/corpus/text_journal.avx',
                      'data/corpus/pos.lexicon.srt',
                      'data/corpus/text_title.avs',
                      'data/corpus/word.hcd',
                      'data/corpus/text_chapter.rng',
                      'data/corpus/text_address.rng',
                      'data/corpus/ner.hcd',
                      'data/corpus/text_publisher.avx',
                      'data/corpus/text_institution.rng',
                      'data/corpus/lemma.crx',
                      'data/corpus/pos.crc',
                      'data/corpus/text_author.rng',
                      'data/corpus/text_address.avs',
                      'data/corpus/pos.lexicon.idx',
                      'data/corpus/ner.huf',
                      'data/corpus/ner.huf.syn',
                      'data/corpus/text_pages.avs',
                      'data/corpus/text_publishing_year.avx',
                      'data/corpus/ner.lexicon.idx',
                      'data/corpus/text.rng',
                      'data/corpus/word.crc',
                      'data/corpus/ner.crc',
                      'data/corpus/text_publisher.rng',
                      'data/corpus/text_editor.rng',
                      'data/corpus/text_author.avs',
                      'data/corpus/s.rng',
                      'data/corpus/text_publishing_year.rng',
                      'data/corpus/simple_pos.corpus.cnt',
                      'data/corpus/simple_pos.lexicon.idx',
                      'data/corpus/word.huf.syn',
                      'data/corpus/ner.lexicon.srt',
                      'data/corpus/text_pages.avx',
                      'data/corpus/text_booktitle.avx',
                      'data/corpus/pos.crx',
                      'data/corpus/ner.crx',
                      'data/corpus/text_institution.avx',
                      'merged/corpus.vrt']
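
This list presumably mirrors the CWB registry and data files an exported corpus archive must contain; the import view in app/corpora/views.py below checks it against a zip's namelist. A minimal sketch of that check, with a helper name chosen here purely for illustration:

# Sketch only; the actual call site lives in app/corpora/views.py.
from zipfile import ZipFile


def looks_like_exported_corpus(archive_path):
    # True if the archive contains at least the expected registry and data files.
    with ZipFile(archive_path, 'r') as archive:
        return set(check_zip_contents).issubset(archive.namelist())
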
app/corpora/tasks.py (Normal file, 62 additions)
@@ -0,0 +1,62 @@
from .. import db, socketio
from ..decorators import background
from ..models import Corpus, CorpusFile, QueryResult


@background
def build_corpus(corpus_id, *args, **kwargs):
    app = kwargs['app']
    with app.app_context():
        corpus = Corpus.query.get(corpus_id)
        if corpus is None:
            raise Exception('Corpus {} not found'.format(corpus_id))
        corpus.build()
        db.session.commit()
        event = 'user_{}_patch'.format(corpus.user_id)
        jsonpatch = [{'op': 'replace', 'path': '/corpora/{}/last_edited_date'.format(corpus.id), 'value': corpus.last_edited_date.timestamp()},  # noqa
                     {'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}]  # noqa
        room = 'user_{}'.format(corpus.user_id)
        socketio.emit(event, jsonpatch, room=room)


@background
def delete_corpus(corpus_id, *args, **kwargs):
    with kwargs['app'].app_context():
        corpus = Corpus.query.get(corpus_id)
        if corpus is None:
            raise Exception('Corpus {} not found'.format(corpus_id))
        event = 'user_{}_patch'.format(corpus.user_id)
        jsonpatch = [{'op': 'remove', 'path': '/corpora/{}'.format(corpus.id)}]
        room = 'user_{}'.format(corpus.user_id)
        corpus.delete()
        db.session.commit()
        socketio.emit(event, jsonpatch, room=room)


@background
def delete_corpus_file(corpus_file_id, *args, **kwargs):
    with kwargs['app'].app_context():
        corpus_file = CorpusFile.query.get(corpus_file_id)
        if corpus_file is None:
            raise Exception('Corpus file {} not found'.format(corpus_file_id))
        event = 'user_{}_patch'.format(corpus_file.corpus.user_id)
        jsonpatch = [{'op': 'remove', 'path': '/corpora/{}/files/{}'.format(corpus_file.corpus_id, corpus_file.id)},  # noqa
                     {'op': 'replace', 'path': '/corpora/{}/status'.format(corpus_file.corpus_id), 'value': corpus_file.corpus.status}]  # noqa
        room = 'user_{}'.format(corpus_file.corpus.user_id)
        corpus_file.delete()
        db.session.commit()
        socketio.emit(event, jsonpatch, room=room)


@background
def delete_query_result(query_result_id, *args, **kwargs):
    with kwargs['app'].app_context():
        query_result = QueryResult.query.get(query_result_id)
        if query_result is None:
            raise Exception('QueryResult {} not found'.format(query_result_id))
        event = 'user_{}_patch'.format(query_result.user_id)
        jsonpatch = [{'op': 'remove', 'path': '/query_results/{}'.format(query_result.id)}]  # noqa
        room = 'user_{}'.format(query_result.user_id)
        query_result.delete()
        db.session.commit()
        socketio.emit(event, jsonpatch, room=room)
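
These tasks rely on the @background decorator from app/decorators.py, which is not part of this diff. Judging from how each task reads kwargs['app'], the decorator presumably schedules the wrapped function as a Socket.IO background task and injects the real application object. The following is a hypothetical sketch of such a decorator under that assumption, not the project's actual implementation.

# Hypothetical sketch; app/decorators.py is not included in this diff.
from functools import wraps

from flask import current_app


def background(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
        from . import socketio  # local import to avoid a circular import
        # Pass the real app object so the task can push an app context.
        kwargs['app'] = current_app._get_current_object()
        return socketio.start_background_task(f, *args, **kwargs)
    return wrapper
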
app/corpora/views.py (Normal file, 431 additions)
@@ -0,0 +1,431 @@
from flask import (abort, flash, make_response, redirect, request,
                   render_template, url_for, send_from_directory)
from flask_login import current_user, login_required
from . import corpora
from . import tasks
from .forms import (AddCorpusFileForm, AddCorpusForm, AddQueryResultForm,
                    EditCorpusFileForm, QueryDownloadForm, QueryForm,
                    DisplayOptionsForm, InspectDisplayOptionsForm,
                    ImportCorpusForm)
from jsonschema import validate
from .. import db, socketio
from ..models import Corpus, CorpusFile, QueryResult
import json
import logging
import os
import shutil
import glob
import xml.etree.ElementTree as ET
from zipfile import ZipFile
from .import_corpus import check_zip_contents


@corpora.route('/add', methods=['GET', 'POST'])
@login_required
def add_corpus():
    form = AddCorpusForm()
    if form.validate_on_submit():
        corpus = Corpus(creator=current_user,
                        description=form.description.data,
                        title=form.title.data)
        db.session.add(corpus)
        db.session.flush()
        db.session.refresh(corpus)
        try:
            os.makedirs(corpus.path)
        except OSError:
            logging.error('Make dir {} led to an OSError!'.format(corpus.path))
            db.session.rollback()
            abort(500)
        else:
            db.session.commit()
            flash('Corpus "{}" added!'.format(corpus.title), 'corpus')
            event = 'user_{}_patch'.format(corpus.user_id)
            jsonpatch = [{'op': 'add', 'path': '/corpora/{}'.format(corpus.id), 'value': corpus.to_dict()}]  # noqa
            room = 'user_{}'.format(corpus.user_id)
            socketio.emit(event, jsonpatch, room=room)
            return redirect(url_for('.corpus', corpus_id=corpus.id))
    return render_template('corpora/add_corpus.html.j2', form=form,
                           title='Add corpus')


@corpora.route('/import', methods=['GET', 'POST'])
@login_required
def import_corpus():
    form = ImportCorpusForm()
    if form.is_submitted():
        if not form.validate():
            return make_response(form.errors, 400)
        corpus = Corpus(creator=current_user,
                        description=form.description.data,
                        title=form.title.data)
        db.session.add(corpus)
        db.session.flush()
        db.session.refresh(corpus)
        try:
            os.makedirs(corpus.path)
        except OSError:
            logging.error('Make dir {} led to an OSError!'.format(corpus.path))
            db.session.rollback()
            flash('Internal Server Error', 'error')
            return make_response(
                {'redirect_url': url_for('.import_corpus')}, 500)
        # Upload zip
        archive_file = os.path.join(corpus.path, form.file.data.filename)
        form.file.data.save(archive_file)
        # Some checks to verify it is a valid exported corpus
        with ZipFile(archive_file, 'r') as zip:
            contents = zip.namelist()
            if set(check_zip_contents).issubset(contents):
                # Unzip
                shutil.unpack_archive(archive_file, corpus.path)
                # Register vrt files to corpus
                vrts = glob.glob(corpus.path + '/*.vrt')
                for file in vrts:
                    element_tree = ET.parse(file)
                    text_node = element_tree.find('text')
                    corpus_file = CorpusFile(
                        address=text_node.get('address', 'NULL'),
                        author=text_node.get('author', 'NULL'),
                        booktitle=text_node.get('booktitle', 'NULL'),
                        chapter=text_node.get('chapter', 'NULL'),
                        corpus=corpus,
                        editor=text_node.get('editor', 'NULL'),
                        filename=os.path.basename(file),
                        institution=text_node.get('institution', 'NULL'),
                        journal=text_node.get('journal', 'NULL'),
                        pages=text_node.get('pages', 'NULL'),
                        publisher=text_node.get('publisher', 'NULL'),
                        publishing_year=text_node.get('publishing_year', ''),
                        school=text_node.get('school', 'NULL'),
                        title=text_node.get('title', 'NULL')
                    )
                    db.session.add(corpus_file)
                # finish import and redirect to imported corpus
                corpus.status = 'prepared'
                db.session.commit()
|
||||
os.remove(archive_file)
|
||||
flash('Corpus "{}" imported!'.format(corpus.title), 'corpus')
|
||||
event = 'user_{}_patch'.format(corpus.user_id)
|
||||
jsonpatch = [{'op': 'add', 'path': '/corpora/{}'.format(corpus.id), 'value': corpus.to_dict()}] # noqa
|
||||
room = 'user_{}'.format(corpus.user_id)
|
||||
socketio.emit(event, jsonpatch, room=room)
|
||||
return make_response(
|
||||
{'redirect_url': url_for('.corpus', corpus_id=corpus.id)}, 201)
|
||||
else:
|
||||
# If imported zip is not valid delete corpus and give feedback
|
||||
flash('Cannot import corpus "{}": Invalid archive file!'.format(corpus.title), 'error') # noqa
|
||||
tasks.delete_corpus(corpus.id)
|
||||
return make_response(
|
||||
{'redirect_url': url_for('.import_corpus')}, 201)
|
||||
return render_template('corpora/import_corpus.html.j2', form=form,
|
||||
title='Import Corpus')
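The validity check above treats check_zip_contents (imported from .import_corpus; the tail of that list, ending in 'merged/corpus.vrt', is visible just above this file in the diff) as the set of archive members a nopaque corpus export must contain. A standalone sketch of the same subset test, with a hypothetical helper name:

from zipfile import ZipFile
from app.corpora.import_corpus import check_zip_contents


def is_valid_corpus_archive(archive_file):
    # Same test the view performs: every required member must be present.
    with ZipFile(archive_file, 'r') as zip_file:
        return set(check_zip_contents).issubset(zip_file.namelist())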
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>')
|
||||
@login_required
|
||||
def corpus(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if not (corpus.creator == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
corpus_files = [corpus_file.to_dict() for corpus_file in corpus.files]
|
||||
return render_template('corpora/corpus.html.j2', corpus=corpus,
|
||||
corpus_files=corpus_files, title='Corpus')
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>/download')
|
||||
@login_required
|
||||
def download_corpus(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if not (corpus.creator == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
# TODO: Check what happens here
|
||||
dir = os.path.dirname(corpus.archive_file)
|
||||
filename = os.path.basename(corpus.archive_file)
|
||||
return send_from_directory(as_attachment=True, directory=dir,
|
||||
filename=filename, mimetype='zip')
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>/analyse')
|
||||
@login_required
|
||||
def analyse_corpus(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
display_options_form = DisplayOptionsForm(
|
||||
prefix='display-options-form',
|
||||
result_context=request.args.get('context', 20),
|
||||
results_per_page=request.args.get('results_per_page', 30)
|
||||
)
|
||||
query_form = QueryForm(prefix='query-form',
|
||||
query=request.args.get('query'))
|
||||
query_download_form = QueryDownloadForm(prefix='query-download-form')
|
||||
inspect_display_options_form = InspectDisplayOptionsForm(
|
||||
prefix='inspect-display-options-form')
|
||||
return render_template(
|
||||
'corpora/analyse_corpus.html.j2',
|
||||
corpus=corpus,
|
||||
display_options_form=display_options_form,
|
||||
inspect_display_options_form=inspect_display_options_form,
|
||||
query_form=query_form,
|
||||
query_download_form=query_download_form,
|
||||
title='Corpus analysis'
|
||||
)
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>/delete')
|
||||
@login_required
|
||||
def delete_corpus(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if not (corpus.creator == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
flash('Corpus "{}" marked for deletion!'.format(corpus.title), 'corpus')
|
||||
tasks.delete_corpus(corpus_id)
|
||||
return redirect(url_for('main.dashboard'))
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>/files/add', methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def add_corpus_file(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if not (corpus.creator == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
form = AddCorpusFileForm(corpus, prefix='add-corpus-file-form')
|
||||
if form.is_submitted():
|
||||
if not form.validate():
|
||||
return make_response(form.errors, 400)
|
||||
# Save the file
|
||||
form.file.data.save(os.path.join(corpus.path, form.file.data.filename))
|
||||
corpus_file = CorpusFile(address=form.address.data,
|
||||
author=form.author.data,
|
||||
booktitle=form.booktitle.data,
|
||||
chapter=form.chapter.data,
|
||||
corpus=corpus,
|
||||
editor=form.editor.data,
|
||||
filename=form.file.data.filename,
|
||||
institution=form.institution.data,
|
||||
journal=form.journal.data,
|
||||
pages=form.pages.data,
|
||||
publisher=form.publisher.data,
|
||||
publishing_year=form.publishing_year.data,
|
||||
school=form.school.data,
|
||||
title=form.title.data)
|
||||
db.session.add(corpus_file)
|
||||
corpus.status = 'unprepared'
|
||||
db.session.commit()
|
||||
flash('Corpus file "{}" added!'.format(corpus_file.filename), 'corpus')
|
||||
event = 'user_{}_patch'.format(corpus.user_id)
|
||||
jsonpatch = [{'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}, # noqa
|
||||
{'op': 'add', 'path': '/corpora/{}/files/{}'.format(corpus.id, corpus_file.id), 'value': corpus_file.to_dict()}] # noqa
|
||||
room = 'user_{}'.format(corpus.user_id)
|
||||
socketio.emit(event, jsonpatch, room=room)
|
||||
return make_response({'redirect_url': url_for('.corpus', corpus_id=corpus.id)}, 201) # noqa
|
||||
return render_template('corpora/add_corpus_file.html.j2', corpus=corpus,
|
||||
form=form, title='Add corpus file')
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>/files/<int:corpus_file_id>/delete')
|
||||
@login_required
|
||||
def delete_corpus_file(corpus_id, corpus_file_id):
|
||||
corpus_file = CorpusFile.query.get_or_404(corpus_file_id)
|
||||
if not corpus_file.corpus_id == corpus_id:
|
||||
abort(404)
|
||||
if not (corpus_file.corpus.creator == current_user
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
flash('Corpus file "{}" marked for deletion!'.format(corpus_file.filename), 'corpus') # noqa
|
||||
tasks.delete_corpus_file(corpus_file_id)
|
||||
return redirect(url_for('.corpus', corpus_id=corpus_id))
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>/files/<int:corpus_file_id>/download')
|
||||
@login_required
|
||||
def download_corpus_file(corpus_id, corpus_file_id):
|
||||
corpus_file = CorpusFile.query.get_or_404(corpus_file_id)
|
||||
if not corpus_file.corpus_id == corpus_id:
|
||||
abort(404)
|
||||
if not (corpus_file.corpus.creator == current_user
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return send_from_directory(as_attachment=True,
|
||||
directory=os.path.dirname(corpus_file.path),
|
||||
filename=corpus_file.filename)
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>/files/<int:corpus_file_id>',
|
||||
methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def corpus_file(corpus_id, corpus_file_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if not (corpus.creator == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
corpus_file = CorpusFile.query.get_or_404(corpus_file_id)
|
||||
if corpus_file.corpus != corpus:
|
||||
abort(404)
|
||||
form = EditCorpusFileForm(prefix='edit-corpus-file-form')
|
||||
if form.validate_on_submit():
|
||||
corpus_file.address = form.address.data
|
||||
corpus_file.author = form.author.data
|
||||
corpus_file.booktitle = form.booktitle.data
|
||||
corpus_file.chapter = form.chapter.data
|
||||
corpus_file.editor = form.editor.data
|
||||
corpus_file.institution = form.institution.data
|
||||
corpus_file.journal = form.journal.data
|
||||
corpus_file.pages = form.pages.data
|
||||
corpus_file.publisher = form.publisher.data
|
||||
corpus_file.publishing_year = form.publishing_year.data
|
||||
corpus_file.school = form.school.data
|
||||
corpus_file.title = form.title.data
|
||||
corpus.status = 'unprepared'
|
||||
db.session.commit()
|
||||
flash('Corpus file "{}" edited!'.format(corpus_file.filename), 'corpus') # noqa
|
||||
return redirect(url_for('.corpus', corpus_id=corpus_id))
|
||||
# If no form is submitted or valid, fill out fields with current values
|
||||
form.address.data = corpus_file.address
|
||||
form.author.data = corpus_file.author
|
||||
form.booktitle.data = corpus_file.booktitle
|
||||
form.chapter.data = corpus_file.chapter
|
||||
form.editor.data = corpus_file.editor
|
||||
form.institution.data = corpus_file.institution
|
||||
form.journal.data = corpus_file.journal
|
||||
form.pages.data = corpus_file.pages
|
||||
form.publisher.data = corpus_file.publisher
|
||||
form.publishing_year.data = corpus_file.publishing_year
|
||||
form.school.data = corpus_file.school
|
||||
form.title.data = corpus_file.title
|
||||
return render_template('corpora/corpus_file.html.j2', corpus=corpus,
|
||||
corpus_file=corpus_file, form=form,
|
||||
title='Edit corpus file')
|
||||
|
||||
|
||||
@corpora.route('/<int:corpus_id>/prepare')
|
||||
@login_required
|
||||
def prepare_corpus(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if not (corpus.creator == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
if corpus.files.all():
|
||||
tasks.build_corpus(corpus_id)
|
||||
flash('Corpus "{}" has been marked to get build!'.format(corpus.title), 'corpus') # noqa
|
||||
else:
|
||||
flash('Cannot build corpus "{}": No corpus file(s)!'.format(corpus.title), 'error') # noqa
|
||||
return redirect(url_for('.corpus', corpus_id=corpus_id))
|
||||
|
||||
|
||||
# Following are view functions to add, view etc. exported results.
|
||||
@corpora.route('/result/add', methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def add_query_result():
|
||||
'''
|
||||
View to import a result as a json file.
|
||||
'''
|
||||
form = AddQueryResultForm(prefix='add-query-result-form')
|
||||
if form.is_submitted():
|
||||
if not form.validate():
|
||||
return make_response(form.errors, 400)
|
||||
query_result = QueryResult(creator=current_user,
|
||||
description=form.description.data,
|
||||
filename=form.file.data.filename,
|
||||
title=form.title.data)
|
||||
db.session.add(query_result)
|
||||
db.session.flush()
|
||||
db.session.refresh(query_result)
|
||||
try:
|
||||
os.makedirs(os.path.dirname(query_result.path))
|
||||
except OSError:
|
||||
logging.error('Make dir {} led to an OSError!'.format(query_result.path)) # noqa
|
||||
db.session.rollback()
|
||||
flash('Internal Server Error', 'error')
|
||||
return make_response(
|
||||
{'redirect_url': url_for('.add_query_result')}, 500)
|
||||
# save the uploaded file
|
||||
form.file.data.save(query_result.path)
|
||||
# parse json from file
|
||||
with open(query_result.path, 'r') as file:
|
||||
query_result_file_content = json.load(file)
|
||||
# parse json schema
|
||||
# with open('app/static/json_schema/nopaque_cqi_py_results_schema.json', 'r') as file: # noqa
|
||||
# schema = json.load(file)
|
||||
# try:
|
||||
# # validate imported json file
|
||||
# validate(instance=query_result_file_content, schema=schema)
|
||||
# except Exception:
|
||||
# tasks.delete_query_result(query_result.id)
|
||||
# flash('Uploaded file is invalid', 'result')
|
||||
# return make_response(
|
||||
# {'redirect_url': url_for('.add_query_result')}, 201)
|
||||
query_result_file_content.pop('matches')
|
||||
query_result_file_content.pop('cpos_lookup')
|
||||
query_result.query_metadata = query_result_file_content
|
||||
db.session.commit()
|
||||
event = 'user_{}_patch'.format(query_result.user_id)
|
||||
jsonpatch = [{'op': 'add', 'path': '/query_results/{}'.format(query_result.id), 'value': query_result.to_dict()}] # noqa
|
||||
room = 'user_{}'.format(query_result.user_id)
|
||||
socketio.emit(event, jsonpatch, room=room)
|
||||
flash('Query result added!', 'result')
|
||||
return make_response({'redirect_url': url_for('.query_result', query_result_id=query_result.id)}, 201) # noqa
|
||||
return render_template('corpora/query_results/add_query_result.html.j2',
|
||||
form=form, title='Add query result')
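The uploaded file is expected to be a JSON export of a corpus analysis query: the view drops the bulky 'matches' and 'cpos_lookup' keys and stores the rest as query_metadata, from which QueryResult.to_dict() later reads 'corpus_name' and 'query'. A sketch of the minimal structure such a file needs; any further keys are not constrained by this diff.

# Illustrative only: minimal content of an importable query result file.
query_result_file_content = {
    'corpus_name': 'CORPUS_7',    # kept in query_metadata, shown as corpus_title
    'query': '[word="nopaque"]',  # kept in query_metadata
    'matches': [],                # stripped before query_metadata is stored
    'cpos_lookup': {}             # stripped before query_metadata is stored
}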
|
||||
|
||||
|
||||
@corpora.route('/result/<int:query_result_id>')
|
||||
@login_required
|
||||
def query_result(query_result_id):
|
||||
query_result = QueryResult.query.get_or_404(query_result_id)
|
||||
if not (query_result.creator == current_user
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return render_template('corpora/query_results/query_result.html.j2',
|
||||
query_result=query_result, title='Query result')
|
||||
|
||||
|
||||
@corpora.route('/result/<int:query_result_id>/inspect')
|
||||
@login_required
|
||||
def inspect_query_result(query_result_id):
|
||||
'''
|
||||
View to inspect an imported result file in a corpus-analysis-like interface.
|
||||
'''
|
||||
query_result = QueryResult.query.get_or_404(query_result_id)
|
||||
query_metadata = query_result.query_metadata
|
||||
if not (query_result.creator == current_user
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
display_options_form = DisplayOptionsForm(
|
||||
prefix='display-options-form',
|
||||
results_per_page=request.args.get('results_per_page', 30),
|
||||
result_context=request.args.get('context', 20)
|
||||
)
|
||||
inspect_display_options_form = InspectDisplayOptionsForm(
|
||||
prefix='inspect-display-options-form'
|
||||
)
|
||||
with open(query_result.path, 'r') as query_result_file:
|
||||
query_result_file_content = json.load(query_result_file)
|
||||
return render_template('corpora/query_results/inspect.html.j2',
|
||||
query_result=query_result,
|
||||
display_options_form=display_options_form,
|
||||
inspect_display_options_form=inspect_display_options_form, # noqa
|
||||
query_result_file_content=query_result_file_content,
|
||||
query_metadata=query_metadata,
|
||||
title='Inspect query result')
|
||||
|
||||
|
||||
@corpora.route('/result/<int:query_result_id>/delete')
|
||||
@login_required
|
||||
def delete_query_result(query_result_id):
|
||||
query_result = QueryResult.query.get_or_404(query_result_id)
|
||||
if not (query_result.creator == current_user
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
flash('Query result "{}" has been marked for deletion!'.format(query_result), 'result') # noqa
|
||||
tasks.delete_query_result(query_result_id)
|
||||
return redirect(url_for('services.service', service="corpus_analysis"))
|
||||
|
||||
|
||||
@corpora.route('/result/<int:query_result_id>/download')
|
||||
@login_required
|
||||
def download_query_result(query_result_id):
|
||||
query_result = QueryResult.query.get_or_404(query_result_id)
|
||||
if not (query_result.creator == current_user
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return send_from_directory(as_attachment=True,
|
||||
directory=os.path.dirname(query_result.path),
|
||||
filename=query_result.filename)
|
54
app/decorators.py
Normal file
@ -0,0 +1,54 @@
|
||||
from . import socketio
|
||||
from flask import abort, current_app, request
|
||||
from flask_login import current_user
|
||||
from functools import wraps
|
||||
|
||||
|
||||
def admin_required(f):
|
||||
@wraps(f)
|
||||
def wrapped(*args, **kwargs):
|
||||
if current_user.is_administrator():
|
||||
return f(*args, **kwargs)
|
||||
else:
|
||||
abort(403)
|
||||
return wrapped
|
||||
|
||||
|
||||
def background(f):
|
||||
'''
|
||||
' This decorator executes a function in a Thread.
|
||||
' Decorated functions need to be executed within a code block where an
|
||||
' app context exists.
|
||||
'
|
||||
' NOTE: An app object is passed as a keyword argument to the decorated
|
||||
' function.
|
||||
'''
|
||||
@wraps(f)
|
||||
def wrapped(*args, **kwargs):
|
||||
kwargs['app'] = current_app._get_current_object()
|
||||
kwargs['current_user'] = current_user._get_current_object()
|
||||
thread = socketio.start_background_task(f, *args, **kwargs)
|
||||
return thread
|
||||
return wrapped
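A minimal usage sketch for the decorator above: the wrapped function receives the real app object as the app keyword argument and has to push its own application context, exactly like the tasks in app/corpora/tasks.py and app/jobs/tasks.py. The task below is hypothetical and only illustrates the calling convention.

from datetime import datetime
from app import db
from app.decorators import background
from app.models import User


@background
def touch_user(user_id, *args, **kwargs):
    app = kwargs['app']  # injected by the decorator
    with app.app_context():
        user = User.query.get(user_id)
        if user is None:
            raise Exception('User {} not found'.format(user_id))
        user.last_seen = datetime.utcnow()
        db.session.commit()

# Called from a view, where app and request contexts are active:
# touch_user(current_user.id)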
|
||||
|
||||
|
||||
def socketio_admin_required(f):
|
||||
@wraps(f)
|
||||
def wrapped(*args, **kwargs):
|
||||
if current_user.is_administrator():
|
||||
return f(*args, **kwargs)
|
||||
else:
|
||||
response = {'code': 401, 'msg': 'Unauthorized'}
|
||||
socketio.emit(request.event['message'], response, room=request.sid)
|
||||
return wrapped
|
||||
|
||||
|
||||
def socketio_login_required(f):
|
||||
@wraps(f)
|
||||
def wrapped(*args, **kwargs):
|
||||
if current_user.is_authenticated:
|
||||
return f(*args, **kwargs)
|
||||
else:
|
||||
response = {'code': 401, 'msg': 'Unauthorized'}
|
||||
socketio.emit(request.event['message'], response, room=request.sid)
|
||||
return wrapped
|
18
app/email.py
Normal file
@ -0,0 +1,18 @@
|
||||
from flask import current_app, render_template
|
||||
from flask_mail import Message
|
||||
from . import mail
|
||||
from .decorators import background
|
||||
|
||||
|
||||
def create_message(recipient, subject, template, **kwargs):
|
||||
msg = Message('{} {}'.format(current_app.config['NOPAQUE_MAIL_SUBJECT_PREFIX'], subject), recipients=[recipient]) # noqa
|
||||
msg.body = render_template('{}.txt.j2'.format(template), **kwargs)
|
||||
msg.html = render_template('{}.html.j2'.format(template), **kwargs)
|
||||
return msg
|
||||
|
||||
|
||||
@background
|
||||
def send(msg, *args, **kwargs):
|
||||
app = kwargs['app']
|
||||
with app.app_context():
|
||||
mail.send(msg)
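A sketch of how the two helpers are combined from a request handler; the recipient, subject, template name and keyword arguments are placeholders, and the template pair '<template>.txt.j2' / '<template>.html.j2' must exist in the templates folder.

from app.email import create_message, send

# Illustrative only: send a hypothetical account confirmation mail.
msg = create_message('user@example.com', 'Confirm your account',
                     'auth/email/confirm', user=user, token=token)
send(msg)  # runs in a background thread via the @background decorator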
|
5
app/errors/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
from flask import Blueprint
|
||||
|
||||
|
||||
errors = Blueprint('errors', __name__)
|
||||
from app.errors import handlers
|
42
app/errors/handlers.py
Normal file
@ -0,0 +1,42 @@
|
||||
from flask import render_template, request, jsonify
|
||||
from . import errors
|
||||
|
||||
|
||||
@errors.app_errorhandler(403)
|
||||
def forbidden(e):
|
||||
if (request.accept_mimetypes.accept_json
|
||||
and not request.accept_mimetypes.accept_html):
|
||||
response = jsonify({'error': 'forbidden'})
|
||||
response.status_code = 403
|
||||
return response
|
||||
return render_template('errors/403.html.j2', title='Forbidden'), 403
|
||||
|
||||
|
||||
@errors.app_errorhandler(404)
|
||||
def not_found(e):
|
||||
if (request.accept_mimetypes.accept_json
|
||||
and not request.accept_mimetypes.accept_html):
|
||||
response = jsonify({'error': 'not found'})
|
||||
response.status_code = 404
|
||||
return response
|
||||
return render_template('errors/404.html.j2', title='Not Found'), 404
|
||||
|
||||
|
||||
@errors.app_errorhandler(413)
|
||||
def payload_too_large(e):
|
||||
if (request.accept_mimetypes.accept_json
|
||||
and not request.accept_mimetypes.accept_html):
|
||||
response = jsonify({'error': 'payload too large'})
|
||||
response.status_code = 413
|
||||
return response
|
||||
return render_template('errors/413.html.j2', title='Payload Too Large'), 413
|
||||
|
||||
|
||||
@errors.app_errorhandler(500)
|
||||
def internal_server_error(e):
|
||||
if (request.accept_mimetypes.accept_json
|
||||
and not request.accept_mimetypes.accept_html):
|
||||
response = jsonify({'error': 'internal server error'})
|
||||
response.status_code = 500
|
||||
return response
|
||||
return render_template('errors/500.html.j2', title='Internal Server Error'), 500
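All four handlers negotiate the response format the same way: clients that accept JSON but not HTML get a small JSON body, everyone else gets the rendered error page. This can be seen with Flask's test client; the 'development' config name below is an assumption, not something defined in this diff.

from app import create_app

app = create_app('development')  # config name is an assumption
client = app.test_client()

html_response = client.get('/nonexistent-url')  # renders errors/404.html.j2
json_response = client.get('/nonexistent-url',
                           headers={'Accept': 'application/json'})
# json_response.get_json() == {'error': 'not found'}, status code 404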
|
72
app/events.py
Normal file
@ -0,0 +1,72 @@
|
||||
from flask import request
|
||||
from flask_login import current_user
|
||||
from flask_socketio import join_room, leave_room
|
||||
from . import socketio
|
||||
from .decorators import socketio_login_required
|
||||
from .models import User
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Socket.IO event handlers #
|
||||
###############################################################################
|
||||
'''
|
||||
' A list containing session ids of connected Socket.IO sessions, to keep track
|
||||
' of all connected sessions, which can be used to determine the runtimes of
|
||||
' associated background tasks.
|
||||
'''
|
||||
socketio_sessions = []
|
||||
|
||||
|
||||
@socketio.on('connect')
|
||||
@socketio_login_required
|
||||
def socketio_connect():
|
||||
'''
|
||||
' The Socket.IO module creates a session id (sid) for each request.
|
||||
' On connect the sid is saved in the connected sessions list.
|
||||
'''
|
||||
socketio_sessions.append(request.sid)
|
||||
|
||||
|
||||
@socketio.on('disconnect')
|
||||
def socketio_disconnect():
|
||||
'''
|
||||
' On disconnect the session id gets removed from the connected sessions
|
||||
' list.
|
||||
'''
|
||||
socketio_sessions.remove(request.sid)
|
||||
|
||||
|
||||
@socketio.on('start_user_session')
|
||||
@socketio_login_required
|
||||
def socketio_start_user_session(user_id):
|
||||
user = User.query.get(user_id)
|
||||
if user is None:
|
||||
response = {'code': 404, 'msg': 'Not found'}
|
||||
socketio.emit('start_user_session', response, room=request.sid)
|
||||
elif not (user == current_user or current_user.is_administrator()):
|
||||
response = {'code': 403, 'msg': 'Forbidden'}
|
||||
socketio.emit('start_user_session', response, room=request.sid)
|
||||
else:
|
||||
response = {'code': 200, 'msg': 'OK'}
|
||||
socketio.emit('start_user_session', response, room=request.sid)
|
||||
socketio.emit('user_{}_init'.format(user.id), user.to_dict(),
|
||||
room=request.sid)
|
||||
room = 'user_{}'.format(user.id)
|
||||
join_room(room)
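The handshake above can be exercised with Flask-SocketIO's test client: after emitting 'start_user_session' with a valid user id, the client receives the status reply, a full 'user_<id>_init' dump built by User.to_dict(), and is joined to the 'user_<id>' room so later '*_patch' events reach it. A rough sketch, assuming a 'testing' config name, an already logged-in Flask test client and an existing user with id 1:

from app import create_app, socketio

app = create_app('testing')  # config name is an assumption
flask_client = app.test_client()
# ... authenticate flask_client here so socketio_login_required passes ...
sio_client = socketio.test_client(app, flask_test_client=flask_client)
sio_client.emit('start_user_session', 1)
received = sio_client.get_received()
# received holds the {'code': 200, 'msg': 'OK'} reply followed by a
# 'user_1_init' event carrying the full user dict.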
|
||||
|
||||
|
||||
@socketio.on('stop_user_session')
|
||||
@socketio_login_required
|
||||
def socketio_stop_user_session(user_id):
|
||||
user = User.query.get(user_id)
|
||||
if user is None:
|
||||
response = {'code': 404, 'msg': 'Not found'}
|
||||
socketio.emit('stop_user_session', response, room=request.sid)
|
||||
elif not (user == current_user or current_user.is_administrator()):
|
||||
response = {'code': 403, 'msg': 'Forbidden'}
|
||||
socketio.emit('stop_user_session', response, room=request.sid)
|
||||
else:
|
||||
response = {'code': 200, 'msg': 'OK'}
|
||||
socketio.emit('stop_user_session', response, room=request.sid)
|
||||
room = 'user_{}'.format(user.id)
|
||||
leave_room(room)
|
5
app/jobs/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
from flask import Blueprint
|
||||
|
||||
|
||||
jobs = Blueprint('jobs', __name__)
|
||||
from . import views
|
36
app/jobs/tasks.py
Normal file
@ -0,0 +1,36 @@
|
||||
from .. import db, socketio
|
||||
from ..decorators import background
|
||||
from ..models import Job
|
||||
|
||||
|
||||
@background
|
||||
def delete_job(job_id, *args, **kwargs):
|
||||
with kwargs['app'].app_context():
|
||||
job = Job.query.get(job_id)
|
||||
if job is None:
|
||||
raise Exception('Job {} not found'.format(job_id))
|
||||
event = 'user_{}_patch'.format(job.user_id)
|
||||
jsonpatch = [{'op': 'remove', 'path': '/jobs/{}'.format(job.id)}]
|
||||
room = 'user_{}'.format(job.user_id)
|
||||
job.delete()
|
||||
db.session.commit()
|
||||
socketio.emit(event, jsonpatch, room=room)
|
||||
|
||||
|
||||
@background
|
||||
def restart_job(job_id, *args, **kwargs):
|
||||
with kwargs['app'].app_context():
|
||||
job = Job.query.get(job_id)
|
||||
if job is None:
|
||||
raise Exception('Job {} not found'.format(job_id))
|
||||
try:
|
||||
job.restart()
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
db.session.commit()
|
||||
event = 'user_{}_patch'.format(job.user_id)
|
||||
jsonpatch = [{'op': 'replace', 'path': '/jobs/{}/end_date'.format(job.id), 'value': job.end_date.timestamp()}, # noqa
|
||||
{'op': 'replace', 'path': '/jobs/{}/status'.format(job.id), 'value': job.status}] # noqa
|
||||
room = 'user_{}'.format(job.user_id)
|
||||
socketio.emit(event, jsonpatch, room=room)
|
67
app/jobs/views.py
Normal file
@ -0,0 +1,67 @@
|
||||
from flask import (abort, flash, redirect, render_template,
|
||||
send_from_directory, url_for)
|
||||
from flask_login import current_user, login_required
|
||||
from . import jobs
|
||||
from . import tasks
|
||||
from ..decorators import admin_required
|
||||
from ..models import Job, JobInput, JobResult
|
||||
import os
|
||||
|
||||
|
||||
@jobs.route('/<int:job_id>')
|
||||
@login_required
|
||||
def job(job_id):
|
||||
job = Job.query.get_or_404(job_id)
|
||||
if not (job.creator == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
job_inputs = [job_input.to_dict() for job_input in job.inputs]
|
||||
return render_template('jobs/job.html.j2', job=job, job_inputs=job_inputs,
|
||||
title='Job')
|
||||
|
||||
|
||||
@jobs.route('/<int:job_id>/delete')
|
||||
@login_required
|
||||
def delete_job(job_id):
|
||||
job = Job.query.get_or_404(job_id)
|
||||
if not (job.creator == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
tasks.delete_job(job_id)
|
||||
flash('Job has been marked for deletion!', 'job')
|
||||
return redirect(url_for('main.dashboard'))
|
||||
|
||||
|
||||
@jobs.route('/<int:job_id>/inputs/<int:job_input_id>/download')
|
||||
@login_required
|
||||
def download_job_input(job_id, job_input_id):
|
||||
job_input = JobInput.query.filter(JobInput.job_id == job_id, JobInput.id == job_input_id).first_or_404() # noqa
|
||||
if not (job_input.job.creator == current_user
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return send_from_directory(as_attachment=True,
|
||||
directory=os.path.dirname(job_input.path),
|
||||
filename=job_input.filename)
|
||||
|
||||
|
||||
@jobs.route('/<int:job_id>/restart')
|
||||
@login_required
|
||||
@admin_required
|
||||
def restart(job_id):
|
||||
job = Job.query.get_or_404(job_id)
|
||||
if job.status not in ['complete', 'failed']:
|
||||
flash('Cannot restart job "{}": Status is not "complete/failed"'.format(job.title), 'error') # noqa
|
||||
else:
|
||||
tasks.restart_job(job_id)
|
||||
flash('Job "{}" has been marked to get restarted!'.format(job.title), 'job') # noqa
|
||||
return redirect(url_for('.job', job_id=job_id))
|
||||
|
||||
|
||||
@jobs.route('/<int:job_id>/results/<int:job_result_id>/download')
|
||||
@login_required
|
||||
def download_job_result(job_id, job_result_id):
|
||||
job_result = JobResult.query.filter(JobResult.job_id == job_id, JobResult.id == job_result_id).first_or_404() # noqa
|
||||
if not (job_result.job.creator == current_user
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return send_from_directory(as_attachment=True,
|
||||
directory=os.path.dirname(job_result.path),
|
||||
filename=job_result.filename)
|
5
app/main/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
from flask import Blueprint
|
||||
|
||||
|
||||
main = Blueprint('main', __name__)
|
||||
from . import views
|
46
app/main/views.py
Normal file
@ -0,0 +1,46 @@
|
||||
from flask import flash, redirect, render_template, url_for
|
||||
from flask_login import login_required, login_user
|
||||
from . import main
|
||||
from ..auth.forms import LoginForm
|
||||
from ..models import User
|
||||
|
||||
|
||||
@main.route('/', methods=['GET', 'POST'])
|
||||
def index():
|
||||
form = LoginForm(prefix='login-form')
|
||||
if form.validate_on_submit():
|
||||
user = User.query.filter_by(username=form.user.data).first()
|
||||
if user is None:
|
||||
user = User.query.filter_by(email=form.user.data.lower()).first()
|
||||
if user is not None and user.verify_password(form.password.data):
|
||||
login_user(user, form.remember_me.data)
|
||||
return redirect(url_for('.dashboard'))
|
||||
flash('Invalid email/username or password.')
|
||||
return render_template('main/index.html.j2', form=form, title='nopaque')
|
||||
|
||||
|
||||
@main.route('/about_and_faq')
|
||||
def about_and_faq():
|
||||
return render_template('main/about_and_faq.html.j2', title='About and FAQ')
|
||||
|
||||
|
||||
@main.route('/dashboard')
|
||||
@login_required
|
||||
def dashboard():
|
||||
return render_template('main/dashboard.html.j2', title='Dashboard')
|
||||
|
||||
|
||||
@main.route('/news')
|
||||
def news():
|
||||
return render_template('main/news.html.j2', title='News')
|
||||
|
||||
|
||||
@main.route('/privacy_policy')
|
||||
def privacy_policy():
|
||||
return render_template('main/privacy_policy.html.j2',
|
||||
title='Privacy statement (GDPR)')
|
||||
|
||||
|
||||
@main.route('/terms_of_use')
|
||||
def terms_of_use():
|
||||
return render_template('main/terms_of_use.html.j2', title='Terms of Use')
|
659
app/models.py
Normal file
@ -0,0 +1,659 @@
|
||||
from datetime import datetime
|
||||
from flask import current_app, url_for
|
||||
from flask_login import UserMixin, AnonymousUserMixin
|
||||
from itsdangerous import BadSignature, TimedJSONWebSignatureSerializer
|
||||
from time import sleep
|
||||
from werkzeug.security import generate_password_hash, check_password_hash
|
||||
import xml.etree.ElementTree as ET
|
||||
from . import db, login_manager
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
|
||||
class Permission:
|
||||
'''
|
||||
Defines User permissions as integers that are powers of 2. A user's permissions
|
||||
can be evaluated using the bitwise operator &. For example, 3 equals MANAGE_CORPORA
|
||||
plus MANAGE_JOBS, and so on.
|
||||
'''
|
||||
MANAGE_CORPORA = 1
|
||||
MANAGE_JOBS = 2
|
||||
# PERMISSION_NAME = 4
|
||||
# PERMISSION_NAME = 8
|
||||
ADMIN = 16
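For illustration, the arithmetic the docstring refers to: a role's permissions column is the sum (bitwise OR) of the granted flags, and a single flag is recovered with &, as Role.has_permission() below does. Not part of the diff.

from app.models import Permission

combined = Permission.MANAGE_CORPORA | Permission.MANAGE_JOBS  # == 3
assert combined & Permission.MANAGE_JOBS == Permission.MANAGE_JOBS  # granted
assert combined & Permission.ADMIN != Permission.ADMIN  # not granted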
|
||||
|
||||
|
||||
class Role(db.Model):
|
||||
'''
|
||||
Model for the different roles Users can have. Roles and Users form a one-to-many
|
||||
relationship: one Role can be associated with many User rows.
|
||||
'''
|
||||
__tablename__ = 'roles'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Fields
|
||||
default = db.Column(db.Boolean, default=False, index=True)
|
||||
name = db.Column(db.String(64), unique=True)
|
||||
permissions = db.Column(db.Integer)
|
||||
# Relationships
|
||||
users = db.relationship('User', backref='role', lazy='dynamic')
|
||||
|
||||
def to_dict(self):
|
||||
return {'id': self.id,
|
||||
'default': self.default,
|
||||
'name': self.name,
|
||||
'permissions': self.permissions}
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(Role, self).__init__(**kwargs)
|
||||
if self.permissions is None:
|
||||
self.permissions = 0
|
||||
|
||||
def __repr__(self):
|
||||
'''
|
||||
String representation of the Role. For human readability.
|
||||
'''
|
||||
return '<Role {}>'.format(self.name)
|
||||
|
||||
def add_permission(self, perm):
|
||||
'''
|
||||
Adds a new permission to a Role. Input is a Permission value.
|
||||
'''
|
||||
if not self.has_permission(perm):
|
||||
self.permissions += perm
|
||||
|
||||
def remove_permission(self, perm):
|
||||
'''
|
||||
Removes a permission from a Role. Input is a Permission value.
|
||||
'''
|
||||
if self.has_permission(perm):
|
||||
self.permissions -= perm
|
||||
|
||||
def reset_permissions(self):
|
||||
'''
|
||||
Resets permissions to zero. Zero equals no permissions at all.
|
||||
'''
|
||||
self.permissions = 0
|
||||
|
||||
def has_permission(self, perm):
|
||||
'''
|
||||
Checks if a Role has a specific Permission. Does this with the bitwise
|
||||
operator.
|
||||
'''
|
||||
return self.permissions & perm == perm
|
||||
|
||||
@staticmethod
|
||||
def insert_roles():
|
||||
'''
|
||||
Inserts roles into the database. This has to be executed before Users
|
||||
are added to the database. Otherwise Users will not have a Role
|
||||
assigned to them. Order of the roles dictionary determines the ID of
|
||||
each role. Users have the ID 1 and Administrators have the ID 2.
|
||||
'''
|
||||
roles = {'User': [Permission.MANAGE_CORPORA, Permission.MANAGE_JOBS],
|
||||
'Administrator': [Permission.MANAGE_CORPORA,
|
||||
Permission.MANAGE_JOBS, Permission.ADMIN]}
|
||||
default_role = 'User'
|
||||
for r in roles:
|
||||
role = Role.query.filter_by(name=r).first()
|
||||
if role is None:
|
||||
role = Role(name=r)
|
||||
role.reset_permissions()
|
||||
for perm in roles[r]:
|
||||
role.add_permission(perm)
|
||||
role.default = (role.name == default_role)
|
||||
db.session.add(role)
|
||||
db.session.commit()
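insert_roles() is idempotent (existing roles are updated in place), so it can be re-run safely. The diff does not show where it is invoked; a typical call site would be a deployment step or a Flask shell session inside an application context, sketched here with an assumed config name.

from app import create_app
from app.models import Role

app = create_app('development')  # config name is an assumption
with app.app_context():
    Role.insert_roles()  # creates or updates 'User' and 'Administrator'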
|
||||
|
||||
|
||||
class User(UserMixin, db.Model):
|
||||
'''
|
||||
Model for Users that are registered to nopaque.
|
||||
'''
|
||||
__tablename__ = 'users'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
|
||||
# Fields
|
||||
confirmed = db.Column(db.Boolean, default=False)
|
||||
email = db.Column(db.String(254), unique=True, index=True)
|
||||
last_seen = db.Column(db.DateTime(), default=datetime.utcnow)
|
||||
member_since = db.Column(db.DateTime(), default=datetime.utcnow)
|
||||
password_hash = db.Column(db.String(128))
|
||||
setting_dark_mode = db.Column(db.Boolean, default=False)
|
||||
setting_job_status_mail_notifications = db.Column(db.String(16),
|
||||
default='end')
|
||||
setting_job_status_site_notifications = db.Column(db.String(16),
|
||||
default='all')
|
||||
username = db.Column(db.String(64), unique=True, index=True)
|
||||
# Relationships
|
||||
corpora = db.relationship('Corpus', backref='creator', lazy='dynamic',
|
||||
cascade='save-update, merge, delete')
|
||||
jobs = db.relationship('Job', backref='creator', lazy='dynamic',
|
||||
cascade='save-update, merge, delete')
|
||||
query_results = db.relationship('QueryResult',
|
||||
backref='creator',
|
||||
cascade='save-update, merge, delete',
|
||||
lazy='dynamic')
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return os.path.join(current_app.config['NOPAQUE_DATA_DIR'],
|
||||
str(self.id))
|
||||
|
||||
@property
|
||||
def password(self):
|
||||
raise AttributeError('password is not a readable attribute')
|
||||
|
||||
@password.setter
|
||||
def password(self, password):
|
||||
self.password_hash = generate_password_hash(password)
|
||||
|
||||
def to_dict(self):
|
||||
return {'id': self.id,
|
||||
'role_id': self.role_id,
|
||||
'confirmed': self.confirmed,
|
||||
'email': self.email,
|
||||
'last_seen': self.last_seen.timestamp(),
|
||||
'member_since': self.member_since.timestamp(),
|
||||
'settings': {'dark_mode': self.setting_dark_mode,
|
||||
'job_status_mail_notifications':
|
||||
self.setting_job_status_mail_notifications,
|
||||
'job_status_site_notifications':
|
||||
self.setting_job_status_site_notifications},
|
||||
'username': self.username,
|
||||
'corpora': {corpus.id: corpus.to_dict()
|
||||
for corpus in self.corpora},
|
||||
'jobs': {job.id: job.to_dict() for job in self.jobs},
|
||||
'query_results': {query_result.id: query_result.to_dict()
|
||||
for query_result in self.query_results},
|
||||
'role': self.role.to_dict()}
|
||||
|
||||
def __repr__(self):
|
||||
'''
|
||||
String representation of the User. For human readability.
|
||||
'''
|
||||
return '<User {}>'.format(self.username)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(User, self).__init__(**kwargs)
|
||||
if self.role is None:
|
||||
if self.email == current_app.config['NOPAQUE_ADMIN']:
|
||||
self.role = Role.query.filter_by(name='Administrator').first()
|
||||
if self.role is None:
|
||||
self.role = Role.query.filter_by(default=True).first()
|
||||
|
||||
def generate_confirmation_token(self, expiration=3600):
|
||||
'''
|
||||
Generates a confirmation token for user confirmation via email.
|
||||
'''
|
||||
s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY'],
|
||||
expiration)
|
||||
return s.dumps({'confirm': self.id}).decode('utf-8')
|
||||
|
||||
def generate_reset_token(self, expiration=3600):
|
||||
'''
|
||||
Generates a reset token for password reset via email.
|
||||
'''
|
||||
s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY'],
|
||||
expiration)
|
||||
return s.dumps({'reset': self.id}).decode('utf-8')
|
||||
|
||||
def confirm(self, token):
|
||||
'''
|
||||
Confirms User if the given token is valid and not expired.
|
||||
'''
|
||||
s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY'])
|
||||
try:
|
||||
data = s.loads(token.encode('utf-8'))
|
||||
except BadSignature:
|
||||
return False
|
||||
if data.get('confirm') != self.id:
|
||||
return False
|
||||
self.confirmed = True
|
||||
db.session.add(self)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def reset_password(token, new_password):
|
||||
'''
|
||||
Resets password for User if the given token is valid and not expired.
|
||||
'''
|
||||
s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY'])
|
||||
try:
|
||||
data = s.loads(token.encode('utf-8'))
|
||||
except BadSignature:
|
||||
return False
|
||||
user = User.query.get(data.get('reset'))
|
||||
if user is None:
|
||||
return False
|
||||
user.password = new_password
|
||||
db.session.add(user)
|
||||
return True
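The token helpers and their consumers form matching pairs: generate_confirmation_token()/confirm() and generate_reset_token()/reset_password(). A round-trip sketch, assuming an app context and an existing User instance named user; note that both consumers only add the user to the session, the caller still commits.

from app import db
from app.models import User

token = user.generate_confirmation_token(expiration=3600)
assert user.confirm(token) is True  # sets user.confirmed = True
db.session.commit()

reset_token = user.generate_reset_token()
User.reset_password(reset_token, 'new secret password')
db.session.commit()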
|
||||
|
||||
def verify_password(self, password):
|
||||
return check_password_hash(self.password_hash, password)
|
||||
|
||||
def can(self, perm):
|
||||
'''
|
||||
Checks if a User with its current role can do something. Checks if the
|
||||
associated role actually has the needed Permission.
|
||||
'''
|
||||
return self.role is not None and self.role.has_permission(perm)
|
||||
|
||||
def is_administrator(self):
|
||||
'''
|
||||
Checks if User has Admin permissions.
|
||||
'''
|
||||
return self.can(Permission.ADMIN)
|
||||
|
||||
def delete(self):
|
||||
'''
|
||||
Delete the user and their corpora and jobs from the database and the filesystem.
|
||||
'''
|
||||
shutil.rmtree(self.path, ignore_errors=True)
|
||||
db.session.delete(self)
|
||||
|
||||
|
||||
class AnonymousUser(AnonymousUserMixin):
|
||||
'''
|
||||
Model replaces the default AnonymousUser.
|
||||
'''
|
||||
|
||||
def can(self, permissions):
|
||||
return False
|
||||
|
||||
def is_administrator(self):
|
||||
return False
|
||||
|
||||
|
||||
class JobInput(db.Model):
|
||||
'''
|
||||
Class to define JobInputs.
|
||||
'''
|
||||
__tablename__ = 'job_inputs'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
job_id = db.Column(db.Integer, db.ForeignKey('jobs.id'))
|
||||
# Fields
|
||||
filename = db.Column(db.String(255))
|
||||
|
||||
@property
|
||||
def download_url(self):
|
||||
return url_for('jobs.download_job_input', job_id=self.job_id,
|
||||
job_input_id=self.id)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return os.path.join(self.job.path, self.filename)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return url_for('jobs.job', job_id=self.job_id,
|
||||
_anchor='job-{}-input-{}'.format(self.job_id, self.id))
|
||||
|
||||
def __repr__(self):
|
||||
'''
|
||||
String representation of the JobInput. For human readability.
|
||||
'''
|
||||
return '<JobInput {}>'.format(self.filename)
|
||||
|
||||
def to_dict(self):
|
||||
return {'download_url': self.download_url,
|
||||
'url': self.url,
|
||||
'id': self.id,
|
||||
'job_id': self.job_id,
|
||||
'filename': self.filename}
|
||||
|
||||
|
||||
class JobResult(db.Model):
|
||||
'''
|
||||
Class to define JobResults.
|
||||
'''
|
||||
__tablename__ = 'job_results'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
job_id = db.Column(db.Integer, db.ForeignKey('jobs.id'))
|
||||
# Fields
|
||||
filename = db.Column(db.String(255))
|
||||
|
||||
@property
|
||||
def download_url(self):
|
||||
return url_for('jobs.download_job_result', job_id=self.job_id,
|
||||
job_result_id=self.id)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return os.path.join(self.job.path, 'output', self.filename)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return url_for('jobs.job', job_id=self.job_id,
|
||||
_anchor='job-{}-result-{}'.format(self.job_id, self.id))
|
||||
|
||||
def __repr__(self):
|
||||
'''
|
||||
String representation of the JobResult. For human readability.
|
||||
'''
|
||||
return '<JobResult {}>'.format(self.filename)
|
||||
|
||||
def to_dict(self):
|
||||
return {'download_url': self.download_url,
|
||||
'url': self.url,
|
||||
'id': self.id,
|
||||
'job_id': self.job_id,
|
||||
'filename': self.filename}
|
||||
|
||||
|
||||
class Job(db.Model):
|
||||
'''
|
||||
Class to define Jobs.
|
||||
'''
|
||||
__tablename__ = 'jobs'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
|
||||
# Fields
|
||||
creation_date = db.Column(db.DateTime(), default=datetime.utcnow)
|
||||
description = db.Column(db.String(255))
|
||||
end_date = db.Column(db.DateTime())
|
||||
service = db.Column(db.String(64))
|
||||
'''
|
||||
' Service specific arguments as string list.
|
||||
' Example: ["-l eng", "--binarize"]
|
||||
'''
|
||||
service_args = db.Column(db.String(255))
|
||||
service_version = db.Column(db.String(16))
|
||||
status = db.Column(db.String(16))
|
||||
title = db.Column(db.String(32))
|
||||
# Relationships
|
||||
inputs = db.relationship('JobInput', backref='job', lazy='dynamic',
|
||||
cascade='save-update, merge, delete')
|
||||
results = db.relationship('JobResult', backref='job', lazy='dynamic',
|
||||
cascade='save-update, merge, delete')
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return os.path.join(self.creator.path, 'jobs', str(self.id))
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return url_for('jobs.job', job_id=self.id)
|
||||
|
||||
def __repr__(self):
|
||||
'''
|
||||
String representation of the Job. For human readability.
|
||||
'''
|
||||
return '<Job {}>'.format(self.title)
|
||||
|
||||
def delete(self):
|
||||
'''
|
||||
Delete the job and its inputs and results from the database and the filesystem.
|
||||
'''
|
||||
if self.status not in ['complete', 'failed']:
|
||||
self.status = 'canceling'
|
||||
db.session.commit()
|
||||
while self.status != 'canceled':
|
||||
# In case the daemon handled a job in any way
|
||||
if self.status != 'canceling':
|
||||
self.status = 'canceling'
|
||||
db.session.commit()
|
||||
sleep(1)
|
||||
db.session.refresh(self)
|
||||
shutil.rmtree(self.path, ignore_errors=True)
|
||||
db.session.delete(self)
|
||||
|
||||
def restart(self):
|
||||
'''
|
||||
Restart a job - only if the status is complete or failed
|
||||
'''
|
||||
|
||||
if self.status not in ['complete', 'failed']:
|
||||
raise Exception('Could not restart job: status is not "complete/failed"') # noqa
|
||||
shutil.rmtree(os.path.join(self.path, 'output'), ignore_errors=True)
|
||||
shutil.rmtree(os.path.join(self.path, 'pyflow.data'), ignore_errors=True) # noqa
|
||||
self.end_date = None
|
||||
self.status = 'submitted'
|
||||
|
||||
def to_dict(self):
|
||||
return {'url': self.url,
|
||||
'id': self.id,
|
||||
'user_id': self.user_id,
|
||||
'creation_date': self.creation_date.timestamp(),
|
||||
'description': self.description,
|
||||
'end_date': (self.end_date.timestamp() if self.end_date else
|
||||
None),
|
||||
'service': self.service,
|
||||
'service_args': self.service_args,
|
||||
'service_version': self.service_version,
|
||||
'status': self.status,
|
||||
'title': self.title,
|
||||
'inputs': {input.id: input.to_dict() for input in self.inputs},
|
||||
'results': {result.id: result.to_dict()
|
||||
for result in self.results}}
|
||||
|
||||
|
||||
class CorpusFile(db.Model):
|
||||
'''
|
||||
Class to define Files.
|
||||
'''
|
||||
__tablename__ = 'corpus_files'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
corpus_id = db.Column(db.Integer, db.ForeignKey('corpora.id'))
|
||||
# Fields
|
||||
address = db.Column(db.String(255))
|
||||
author = db.Column(db.String(255))
|
||||
booktitle = db.Column(db.String(255))
|
||||
chapter = db.Column(db.String(255))
|
||||
editor = db.Column(db.String(255))
|
||||
filename = db.Column(db.String(255))
|
||||
institution = db.Column(db.String(255))
|
||||
journal = db.Column(db.String(255))
|
||||
pages = db.Column(db.String(255))
|
||||
publisher = db.Column(db.String(255))
|
||||
publishing_year = db.Column(db.Integer)
|
||||
school = db.Column(db.String(255))
|
||||
title = db.Column(db.String(255))
|
||||
|
||||
@property
|
||||
def download_url(self):
|
||||
return url_for('corpora.download_corpus_file',
|
||||
corpus_id=self.corpus_id, corpus_file_id=self.id)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return os.path.join(self.corpus.path, self.filename)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return url_for('corpora.corpus_file', corpus_id=self.corpus_id,
|
||||
corpus_file_id=self.id)
|
||||
|
||||
def delete(self):
|
||||
try:
|
||||
os.remove(self.path)
|
||||
except OSError:
|
||||
logging.error('Removing {} led to an OSError!'.format(self.path))
|
||||
pass
|
||||
db.session.delete(self)
|
||||
self.corpus.status = 'unprepared'
|
||||
|
||||
def to_dict(self):
|
||||
return {'download_url': self.download_url,
|
||||
'url': self.url,
|
||||
'id': self.id,
|
||||
'corpus_id': self.corpus_id,
|
||||
'address': self.address,
|
||||
'author': self.author,
|
||||
'booktitle': self.booktitle,
|
||||
'chapter': self.chapter,
|
||||
'editor': self.editor,
|
||||
'filename': self.filename,
|
||||
'institution': self.institution,
|
||||
'journal': self.journal,
|
||||
'pages': self.pages,
|
||||
'publisher': self.publisher,
|
||||
'publishing_year': self.publishing_year,
|
||||
'school': self.school,
|
||||
'title': self.title}
|
||||
|
||||
|
||||
class Corpus(db.Model):
|
||||
'''
|
||||
Class to define a corpus.
|
||||
'''
|
||||
__tablename__ = 'corpora'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
|
||||
# Fields
|
||||
creation_date = db.Column(db.DateTime(), default=datetime.utcnow)
|
||||
current_nr_of_tokens = db.Column(db.Integer, default=0)
|
||||
description = db.Column(db.String(255))
|
||||
last_edited_date = db.Column(db.DateTime(), default=datetime.utcnow)
|
||||
max_nr_of_tokens = 2147483647
|
||||
status = db.Column(db.String(16), default='unprepared')
|
||||
title = db.Column(db.String(32))
|
||||
archive_file = db.Column(db.String(255))
|
||||
# Relationships
|
||||
files = db.relationship('CorpusFile', backref='corpus', lazy='dynamic',
|
||||
cascade='save-update, merge, delete')
|
||||
|
||||
@property
|
||||
def analysis_url(self):
|
||||
return url_for('corpora.analyse_corpus', corpus_id=self.id)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return os.path.join(self.creator.path, 'corpora', str(self.id))
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return url_for('corpora.corpus', corpus_id=self.id)
|
||||
|
||||
def to_dict(self):
|
||||
return {'analysis_url': self.analysis_url,
|
||||
'url': self.url,
|
||||
'id': self.id,
|
||||
'user_id': self.user_id,
|
||||
'creation_date': self.creation_date.timestamp(),
|
||||
'current_nr_of_tokens': self.current_nr_of_tokens,
|
||||
'description': self.description,
|
||||
'status': self.status,
|
||||
'last_edited_date': self.last_edited_date.timestamp(),
|
||||
'max_nr_of_tokens': self.max_nr_of_tokens,
|
||||
'title': self.title,
|
||||
'files': {file.id: file.to_dict() for file in self.files}}
|
||||
|
||||
def build(self):
|
||||
output_dir = os.path.join(self.path, 'merged')
|
||||
shutil.rmtree(output_dir, ignore_errors=True)
|
||||
os.mkdir(output_dir)
|
||||
master_element_tree = ET.ElementTree(
|
||||
ET.fromstring('<corpus>\n</corpus>')
|
||||
)
|
||||
for corpus_file in self.files:
|
||||
element_tree = ET.parse(corpus_file.path)
|
||||
text_node = element_tree.find('text')
|
||||
text_node.set('address', corpus_file.address or "NULL")
|
||||
text_node.set('author', corpus_file.author)
|
||||
text_node.set('booktitle', corpus_file.booktitle or "NULL")
|
||||
text_node.set('chapter', corpus_file.chapter or "NULL")
|
||||
text_node.set('editor', corpus_file.editor or "NULL")
|
||||
text_node.set('institution', corpus_file.institution or "NULL")
|
||||
text_node.set('journal', corpus_file.journal or "NULL")
|
||||
text_node.set('pages', corpus_file.pages or "NULL")
|
||||
text_node.set('publisher', corpus_file.publisher or "NULL")
|
||||
text_node.set('publishing_year', str(corpus_file.publishing_year))
|
||||
text_node.set('school', corpus_file.school or "NULL")
|
||||
text_node.set('title', corpus_file.title)
|
||||
element_tree.write(corpus_file.path)
|
||||
master_element_tree.getroot().insert(1, text_node)
|
||||
output_file = os.path.join(output_dir, 'corpus.vrt')
|
||||
master_element_tree.write(output_file,
|
||||
xml_declaration=True,
|
||||
encoding='utf-8')
|
||||
self.last_edited_date = datetime.utcnow()
|
||||
self.status = 'submitted'
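build() stamps the bibliographic metadata onto each file's <text> element and concatenates all <text> nodes into merged/corpus.vrt, then sets the status to 'submitted' for downstream processing. A small sketch that inspects the merged file of an already built Corpus instance named corpus; illustrative only.

import os
import xml.etree.ElementTree as ET

merged_file = os.path.join(corpus.path, 'merged', 'corpus.vrt')
merged = ET.parse(merged_file)
assert merged.getroot().tag == 'corpus'
for text_node in merged.getroot().findall('text'):
    print(text_node.get('title'), text_node.get('author'),
          text_node.get('publishing_year'))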
|
||||
|
||||
def delete(self):
|
||||
shutil.rmtree(self.path, ignore_errors=True)
|
||||
db.session.delete(self)
|
||||
|
||||
def __repr__(self):
|
||||
'''
|
||||
String representation of the corpus. For human readability.
|
||||
'''
|
||||
return '<Corpus {}>'.format(self.title)
|
||||
|
||||
|
||||
class QueryResult(db.Model):
|
||||
'''
|
||||
Class to define a corpus analysis result.
|
||||
'''
|
||||
__tablename__ = 'query_results'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
|
||||
# Fields
|
||||
description = db.Column(db.String(255))
|
||||
filename = db.Column(db.String(255))
|
||||
query_metadata = db.Column(db.JSON())
|
||||
title = db.Column(db.String(32))
|
||||
|
||||
@property
|
||||
def download_url(self):
|
||||
return url_for('corpora.download_query_result',
|
||||
query_result_id=self.id)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return os.path.join(
|
||||
self.creator.path, 'query_results', str(self.id), self.filename)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return url_for('corpora.query_result', query_result_id=self.id)
|
||||
|
||||
def delete(self):
|
||||
# self.path points at the result file itself, so remove its directory
shutil.rmtree(os.path.dirname(self.path), ignore_errors=True)
|
||||
db.session.delete(self)
|
||||
|
||||
def to_dict(self):
|
||||
return {'download_url': self.download_url,
|
||||
'url': self.url,
|
||||
'id': self.id,
|
||||
'user_id': self.user_id,
|
||||
'corpus_title': self.query_metadata['corpus_name'],
|
||||
'description': self.description,
|
||||
'filename': self.filename,
|
||||
'query': self.query_metadata['query'],
|
||||
'query_metadata': self.query_metadata,
|
||||
'title': self.title}
|
||||
|
||||
def __repr__(self):
|
||||
'''
|
||||
String representation of the QueryResult. For human readability.
|
||||
'''
|
||||
return '<QueryResult {}>'.format(self.title)
|
||||
|
||||
|
||||
'''
|
||||
' Flask-Login is told to use the application’s custom anonymous user by setting
|
||||
' its class in the login_manager.anonymous_user attribute.
|
||||
'''
|
||||
login_manager.anonymous_user = AnonymousUser
|
||||
|
||||
|
||||
@login_manager.user_loader
|
||||
def load_user(user_id):
|
||||
return User.query.get(int(user_id))
|
80
app/services/__init__.py
Normal file
@ -0,0 +1,80 @@
|
||||
from flask import Blueprint
|
||||
|
||||
|
||||
SERVICES = {
|
||||
'corpus_analysis': {
|
||||
'name': 'Corpus analysis'
|
||||
},
|
||||
'file_setup': {
|
||||
'name': 'File setup',
|
||||
'versions': {
|
||||
'latest': '1.0.0b',
|
||||
'1.0.0b': {
|
||||
'publishing_data': {
|
||||
'date': None,
|
||||
'title': 'nopaque File setup service',
|
||||
'url': 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/file-setup/-/tree/1.0.0b', # noqa
|
||||
'version': '1.0.0'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'nlp': {
|
||||
'name': 'Natural Language Processing',
|
||||
'versions': {
|
||||
'latest': '1.0.0b',
|
||||
'1.0.0b': {
|
||||
'check_encoding': True,
|
||||
'models': {
|
||||
'de': 'German',
|
||||
'en': 'English',
|
||||
'it': 'Italian',
|
||||
'nl': 'Dutch',
|
||||
'pl': 'Polish',
|
||||
'zh': 'Chinese'
|
||||
},
|
||||
'publishing_data': {
|
||||
'date': None,
|
||||
'title': 'nopaque NLP service',
|
||||
'url': 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/nlp/-/tree/1.0.0b', # noqa
|
||||
'version': '1.0.0'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'ocr': {
|
||||
'name': 'Optical Character Recognition',
|
||||
'versions': {
|
||||
'latest': '1.0.0b',
|
||||
'1.0.0b': {
|
||||
'binarization': True,
|
||||
'models': {
|
||||
'ara': 'Arabic',
|
||||
'chi_tra': 'Chinese - Traditional',
|
||||
'dan': 'Danish',
|
||||
'eng': 'English',
|
||||
'enm': 'English, Middle 1100-1500',
|
||||
'fra': 'French',
|
||||
'frm': 'French, Middle ca. 1400-1600',
|
||||
'deu': 'German',
|
||||
'frk': 'German Fraktur',
|
||||
'ell': 'Greek, Modern (1453-)',
|
||||
'ita': 'Italian',
|
||||
'por': 'Portuguese',
|
||||
'rus': 'Russian',
|
||||
'spa': 'Spanish; Castilian',
|
||||
},
|
||||
'publishing_data': {
|
||||
'date': None,
|
||||
'title': 'nopaque OCR service',
|
||||
'url': 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/ocr/-/tree/1.0.0b', # noqa
|
||||
'version': '1.0.0'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
services = Blueprint('services', __name__)
|
||||
from . import views
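SERVICES doubles as a small service registry: the forms and views of this blueprint read the 'latest' pointer and the per-version capability flags ('check_encoding', 'binarization', 'models') from it. For illustration:

from app.services import SERVICES

latest_ocr = SERVICES['ocr']['versions']['latest']  # '1.0.0b'
ocr = SERVICES['ocr']['versions'][latest_ocr]
assert ocr['binarization'] is True
assert sorted(ocr['models'])[:3] == ['ara', 'chi_tra', 'dan']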
|
83
app/services/forms.py
Normal file
@ -0,0 +1,83 @@
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import (BooleanField, MultipleFileField, SelectField, StringField,
|
||||
SubmitField, ValidationError)
|
||||
from wtforms.validators import DataRequired, Length
|
||||
from . import SERVICES
|
||||
|
||||
|
||||
class AddJobForm(FlaskForm):
|
||||
description = StringField('Description',
|
||||
validators=[DataRequired(), Length(1, 255)])
|
||||
submit = SubmitField()
|
||||
title = StringField('Title', validators=[DataRequired(), Length(1, 32)])
|
||||
version = SelectField('Version', validators=[DataRequired()])
|
||||
|
||||
|
||||
class AddNLPJobForm(AddJobForm):
|
||||
check_encoding = BooleanField('Check encoding')
|
||||
files = MultipleFileField('Files', validators=[DataRequired()])
|
||||
language = SelectField('Language', choices=[('', 'Choose your option')],
|
||||
default='', validators=[DataRequired()])
|
||||
|
||||
def validate_check_encoding(self, field):
|
||||
if field.data and 'check_encoding' not in SERVICES['nlp']['versions'][self.version.data]: # noqa
|
||||
raise ValidationError('Check encoding is not available in this version') # noqa
|
||||
|
||||
def validate_files(self, field):
|
||||
for file in field.data:
|
||||
if not file.filename.lower().endswith('.txt'):
|
||||
raise ValidationError('File does not have an approved '
|
||||
'extension: .txt')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
version = kwargs.pop('version', SERVICES['nlp']['versions']['latest'])
|
||||
super().__init__(*args, **kwargs)
|
||||
if 'check_encoding' not in SERVICES['nlp']['versions'][version]:
|
||||
self.check_encoding.render_kw = {'disabled': True}
|
||||
self.language.choices += [(x, y) for x, y in SERVICES['nlp']['versions'][version]['models'].items()] # noqa
|
||||
self.version.choices = [(x, x) for x in SERVICES['nlp']['versions'] if x != 'latest'] # noqa
|
||||
self.version.default = version
|
||||
|
||||
|
||||
class AddOCRJobForm(AddJobForm):
|
||||
binarization = BooleanField('Binarization')
|
||||
files = MultipleFileField('Files', validators=[DataRequired()])
|
||||
language = SelectField('Language', choices=[('', 'Choose your option')],
|
||||
default='', validators=[DataRequired()])
|
||||
|
||||
def validate_binarization(self, field):
|
||||
if field.data and 'binarization' not in SERVICES['ocr']['versions'][self.version.data]: # noqa
|
||||
raise ValidationError('Binarization is not available in this version') # noqa
|
||||
|
||||
def validate_files(self, field):
|
||||
for file in field.data:
|
||||
if not file.filename.lower().endswith('.pdf'):
|
||||
raise ValidationError('File does not have an approved '
|
||||
'extension: .pdf')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
version = kwargs.pop('version', SERVICES['ocr']['versions']['latest'])
|
||||
super().__init__(*args, **kwargs)
|
||||
if 'binarization' not in SERVICES['ocr']['versions'][version]:
|
||||
self.binarization.render_kw = {'disabled': True}
|
||||
self.language.choices += [(x, y) for x, y in SERVICES['ocr']['versions'][version]['models'].items()] # noqa
|
||||
self.version.choices = [(x, x) for x in SERVICES['ocr']['versions'] if x != 'latest'] # noqa
|
||||
self.version.default = version
|
||||
|
||||
|
||||
class AddFileSetupJobForm(AddJobForm):
|
||||
files = MultipleFileField('Files', validators=[DataRequired()])
|
||||
|
||||
def validate_files(self, field):
|
||||
for file in field.data:
|
||||
if not file.filename.lower().endswith(('.jpeg', '.jpg', '.png',
|
||||
'.tiff', '.tif')):
|
||||
raise ValidationError('File does not have an approved '
|
||||
'extension: .jpeg | .jpg | .png | .tiff '
|
||||
'| .tif')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
version = kwargs.pop('version', SERVICES['file_setup']['versions']['latest'])
|
||||
super().__init__(*args, **kwargs)
|
||||
self.version.choices = [(x, x) for x in SERVICES['file_setup']['versions'] if x != 'latest'] # noqa
|
||||
self.version.default = version
|
85
app/services/views.py
Normal file
@ -0,0 +1,85 @@
|
||||
from flask import (abort, flash, make_response, render_template, request,
|
||||
url_for)
|
||||
from flask_login import current_user, login_required
|
||||
from werkzeug.utils import secure_filename
|
||||
from . import services
|
||||
from . import SERVICES
|
||||
from .forms import AddFileSetupJobForm, AddNLPJobForm, AddOCRJobForm
|
||||
from .. import db, socketio
|
||||
from ..models import Job, JobInput
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
|
||||
@services.route('/<service>', methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def service(service):
|
||||
if service not in SERVICES:
|
||||
abort(404)
|
||||
if service == 'corpus_analysis':
|
||||
return render_template('services/{}.html.j2'.format(service),
|
||||
title=SERVICES[service]['name'])
|
||||
elif service == 'file_setup':
|
||||
form = AddFileSetupJobForm(prefix='add-file-setup-job-form')
|
||||
elif service == 'nlp':
|
||||
version = request.args.get('version')
|
||||
if version is None or version not in SERVICES[service]['versions']:
|
||||
form = AddNLPJobForm(prefix='add-nlp-job-form')
|
||||
else:
|
||||
form = AddNLPJobForm(prefix='add-nlp-job-form', version=version)
|
||||
form.version.data = version
|
||||
elif service == 'ocr':
|
||||
version = request.args.get('version')
|
||||
if version is None or version not in SERVICES[service]['versions']:
|
||||
form = AddOCRJobForm(prefix='add-ocr-job-form')
|
||||
else:
|
||||
form = AddOCRJobForm(prefix='add-ocr-job-form', version=version)
|
||||
form.version.data = version
|
||||
if form.is_submitted():
|
||||
if not form.validate():
|
||||
logging.error(form.errors)
|
||||
return make_response(form.errors, 400)
|
||||
service_args = []
|
||||
if service == 'nlp':
|
||||
service_args.append('-l {}'.format(form.language.data))
|
||||
if form.check_encoding.data:
|
||||
service_args.append('--check-encoding')
|
||||
if service == 'ocr':
|
||||
service_args.append('-l {}'.format(form.language.data))
|
||||
if form.binarization.data:
|
||||
service_args.append('--binarize')
|
||||
job = Job(creator=current_user,
|
||||
description=form.description.data,
|
||||
service=service, service_args=json.dumps(service_args),
|
||||
service_version=form.version.data,
|
||||
status='preparing', title=form.title.data)
|
||||
db.session.add(job)
|
||||
db.session.flush()
|
||||
db.session.refresh(job)
|
||||
try:
|
||||
os.makedirs(job.path)
|
||||
except OSError:
|
||||
logging.error('Make dir {} led to an OSError!'.format(job.path))
|
||||
db.session.rollback()
|
||||
flash('Internal Server Error', 'error')
|
||||
return make_response(
|
||||
{'redirect_url': url_for('.service', service=service)}, 500)
|
||||
else:
|
||||
for file in form.files.data:
|
||||
filename = secure_filename(file.filename)
|
||||
job_input = JobInput(filename=filename, job=job)
|
||||
file.save(job_input.path)
|
||||
db.session.add(job_input)
|
||||
job.status = 'submitted'
|
||||
db.session.commit()
|
||||
flash('Job "{}" added'.format(job.title), 'job')
|
||||
event = 'user_{}_patch'.format(job.user_id)
|
||||
jsonpatch = [{'op': 'add', 'path': '/jobs/{}'.format(job.id), 'value': job.to_dict()}] # noqa
|
||||
room = 'user_{}'.format(job.user_id)
|
||||
socketio.emit(event, jsonpatch, room=room)
|
||||
return make_response(
|
||||
{'redirect_url': url_for('jobs.job', job_id=job.id)}, 201)
|
||||
return render_template('services/{}.html.j2'.format(service),
|
||||
form=form, title=SERVICES[service]['name'],
|
||||
versions=SERVICES[service]['versions'])
|
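After a successful submission the view above pushes a JSON Patch document ([{'op': 'add', 'path': '/jobs/<id>', 'value': ...}]) as the event 'user_<user_id>_patch' into the Socket.IO room 'user_<user_id>'. The handler that actually puts a browser connection into that room lives in app/events.py, which is not part of this section; the sketch below is only an assumption about what such a join could look like with Flask-SocketIO, reusing the room naming from the emit above.

from flask_login import current_user
from flask_socketio import join_room

from app import socketio


@socketio.on('connect')
def on_connect():
    # Assumed: authenticated users join their personal room so that the
    # 'user_<id>_patch' emits from services/views.py reach their browser.
    if current_user.is_authenticated:
        join_room('user_{}'.format(current_user.id))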
5
app/settings/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
from flask import Blueprint
|
||||
|
||||
|
||||
settings = Blueprint('settings', __name__)
|
||||
from . import views # noqa
|
78
app/settings/forms.py
Normal file
@ -0,0 +1,78 @@
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import (BooleanField, PasswordField, SelectField, StringField,
|
||||
SubmitField, ValidationError)
|
||||
from wtforms.validators import DataRequired, Email, EqualTo, Length, Regexp
|
||||
from ..models import User
|
||||
|
||||
|
||||
class ChangePasswordForm(FlaskForm):
|
||||
password = PasswordField('Old password', validators=[DataRequired()])
|
||||
new_password = PasswordField(
|
||||
'New password',
|
||||
validators=[DataRequired(), EqualTo('new_password2',
|
||||
message='Passwords must match.')]
|
||||
)
|
||||
new_password2 = PasswordField(
|
||||
'Confirm new password', validators=[DataRequired()])
|
||||
submit = SubmitField('Change password')
|
||||
|
||||
def __init__(self, user=current_user, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.user = user
|
||||
|
||||
def validate_password(self, field):
|
||||
if not self.user.verify_password(field.data):
|
||||
raise ValidationError('Invalid password.')
|
||||
|
||||
|
||||
class EditGeneralSettingsForm(FlaskForm):
|
||||
dark_mode = BooleanField('Dark mode')
|
||||
email = StringField('E-Mail',
|
||||
validators=[DataRequired(), Length(1, 254), Email()])
|
||||
username = StringField(
|
||||
'Username',
|
||||
validators=[DataRequired(),
|
||||
Length(1, 64),
|
||||
Regexp(current_app.config['NOPAQUE_USERNAME_REGEX'],
|
||||
message='Usernames must have only letters, numbers,'
|
||||
' dots or underscores')]
|
||||
)
|
||||
submit = SubmitField('Submit')
|
||||
|
||||
def __init__(self, user=current_user, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.user = user
|
||||
|
||||
def validate_email(self, field):
|
||||
if (field.data != self.user.email
|
||||
and User.query.filter_by(email=field.data).first()):
|
||||
raise ValidationError('Email already registered.')
|
||||
|
||||
def validate_username(self, field):
|
||||
if (field.data != self.user.username
|
||||
and User.query.filter_by(username=field.data).first()):
|
||||
raise ValidationError('Username already in use.')
|
||||
|
||||
|
||||
class EditNotificationSettingsForm(FlaskForm):
|
||||
job_status_mail_notifications = SelectField(
|
||||
'Job status mail notifications',
|
||||
choices=[('', 'Choose your option'),
|
||||
('all', 'Notify on all status changes'),
|
||||
('end', 'Notify only when a job ended'),
|
||||
('none', 'No status update notifications')],
|
||||
validators=[DataRequired()])
|
||||
job_status_site_notifications = SelectField(
|
||||
'Job status site notifications',
|
||||
choices=[('', 'Choose your option'),
|
||||
('all', 'Notify on all status changes'),
|
||||
('end', 'Notify only when a job ended'),
|
||||
('none', 'No status update notifications')],
|
||||
validators=[DataRequired()])
|
||||
submit = SubmitField('Save settings')
|
||||
|
||||
def __init__(self, user=current_user, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.user = user
|
13
app/settings/tasks.py
Normal file
@ -0,0 +1,13 @@
|
||||
from .. import db
|
||||
from ..decorators import background
|
||||
from ..models import User
|
||||
|
||||
|
||||
@background
|
||||
def delete_user(user_id, *args, **kwargs):
|
||||
with kwargs['app'].app_context():
|
||||
user = User.query.get(user_id)
|
||||
if user is None:
|
||||
raise Exception('User {} not found'.format(user_id))
|
||||
user.delete()
|
||||
db.session.commit()
|
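delete_user above depends on the @background decorator from app/decorators.py, which is not included in this section. The sketch below is a guess at its general shape: run the wrapped function in a daemon thread and pass the real application object in via kwargs['app']. That would explain the kwargs['app'].app_context() call above; the actual decorator may differ.

from functools import wraps
from threading import Thread

from flask import current_app


def background(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
        # Hand the real app object to the thread so it can push its own
        # application context (as delete_user does above).
        kwargs['app'] = current_app._get_current_object()
        thread = Thread(target=f, args=args, kwargs=kwargs, daemon=True)
        thread.start()
        return thread
    return wrapper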
75
app/settings/views.py
Normal file
@ -0,0 +1,75 @@
|
||||
from flask import flash, redirect, render_template, url_for
|
||||
from flask_login import current_user, login_required, logout_user
|
||||
from . import settings, tasks
|
||||
from .forms import (ChangePasswordForm, EditGeneralSettingsForm,
|
||||
EditNotificationSettingsForm)
|
||||
from .. import db
|
||||
|
||||
|
||||
@settings.route('/')
|
||||
@login_required
|
||||
def index():
|
||||
return redirect(url_for('.edit_general_settings'))
|
||||
|
||||
|
||||
@settings.route('/change_password', methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def change_password():
|
||||
form = ChangePasswordForm()
|
||||
if form.validate_on_submit():
|
||||
current_user.password = form.new_password.data
|
||||
db.session.commit()
|
||||
flash('Your password has been updated.')
|
||||
return redirect(url_for('.change_password'))
|
||||
return render_template('settings/change_password.html.j2',
|
||||
form=form, title='Change password')
|
||||
|
||||
|
||||
@settings.route('/edit_general_settings', methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def edit_general_settings():
|
||||
form = EditGeneralSettingsForm()
|
||||
if form.validate_on_submit():
|
||||
current_user.email = form.email.data
|
||||
current_user.setting_dark_mode = form.dark_mode.data
|
||||
current_user.username = form.username.data
|
||||
db.session.commit()
|
||||
flash('Your changes have been saved.')
|
||||
return redirect(url_for('.edit_general_settings'))
|
||||
form.dark_mode.data = current_user.setting_dark_mode
|
||||
form.email.data = current_user.email
|
||||
form.username.data = current_user.username
|
||||
return render_template('settings/edit_general_settings.html.j2',
|
||||
form=form, title='General settings')
|
||||
|
||||
|
||||
@settings.route('/edit_notification_settings', methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def edit_notification_settings():
|
||||
form = EditNotificationSettingsForm()
|
||||
if form.validate_on_submit():
|
||||
current_user.setting_job_status_mail_notifications = \
|
||||
form.job_status_mail_notifications.data
|
||||
current_user.setting_job_status_site_notifications = \
|
||||
form.job_status_site_notifications.data
|
||||
db.session.commit()
|
||||
flash('Your changes have been saved.')
|
||||
return redirect(url_for('.edit_notification_settings'))
|
||||
form.job_status_mail_notifications.data = \
|
||||
current_user.setting_job_status_mail_notifications
|
||||
form.job_status_site_notifications.data = \
|
||||
current_user.setting_job_status_site_notifications
|
||||
return render_template('settings/edit_notification_settings.html.j2',
|
||||
form=form, title='Notification settings')
|
||||
|
||||
|
||||
@settings.route('/delete')
|
||||
@login_required
|
||||
def delete():
|
||||
"""
|
||||
View to delete current_user and all associated data.
|
||||
"""
|
||||
tasks.delete_user(current_user.id)
|
||||
logout_user()
|
||||
flash('Your account has been marked for deletion!')
|
||||
return redirect(url_for('main.index'))
|
39
app/static/css/material_icons.css
Normal file
@ -0,0 +1,39 @@
|
||||
/* https://google.github.io/material-design-icons/#setup-method-2-self-hosting */
|
||||
|
||||
@font-face {
|
||||
font-family: 'Material Icons';
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
src: local('Material Icons'),
|
||||
local('MaterialIcons-Regular'),
|
||||
url(../fonts/material_icons/MaterialIcons-Regular.ttf) format('truetype'),
|
||||
url(../fonts/material_icons/MaterialIconsOutlined-Regular.otf) format('opentype'),
|
||||
url(../fonts/material_icons/MaterialIconsRound-Regular.otf) format('opentype'),
|
||||
url(../fonts/material_icons/MaterialIconsSharp-Regular.otf) format('opentype'),
|
||||
url(../fonts/material_icons/MaterialIconsTwoTone-Regular.otf) format('opentype');
|
||||
}
|
||||
|
||||
.material-icons {
|
||||
font-family: 'Material Icons';
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
font-size: 24px; /* Preferred icon size */
|
||||
display: inline-block;
|
||||
line-height: 1;
|
||||
text-transform: none;
|
||||
letter-spacing: normal;
|
||||
word-wrap: normal;
|
||||
white-space: nowrap;
|
||||
direction: ltr;
|
||||
|
||||
/* Support for all WebKit browsers. */
|
||||
-webkit-font-smoothing: antialiased;
|
||||
/* Support for Safari and Chrome. */
|
||||
text-rendering: optimizeLegibility;
|
||||
|
||||
/* Support for Firefox. */
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
|
||||
/* Support for IE. */
|
||||
font-feature-settings: 'liga';
|
||||
}
|
13
app/static/css/materialize.min.css
vendored
Normal file
9
app/static/css/materialize_fixes.css
Normal file
@ -0,0 +1,9 @@
|
||||
/* Fix material icon vertical alignment when nested in various elements */
|
||||
h1 .nopaque-icons, h2 .nopaque-icons, h3 .nopaque-icons, h4 .nopaque-icons,
|
||||
.tab .nopaque-icons, .tab .material-icons {
|
||||
line-height: inherit;
|
||||
}
|
||||
|
||||
.parallax-container .parallax {
|
||||
z-index: 0;
|
||||
}
|
109
app/static/css/nopaque.css
Normal file
@ -0,0 +1,109 @@
|
||||
/* Change navbar height because an extended and fixed navbar is used */
|
||||
.navbar-fixed {
|
||||
height: 112px;
|
||||
}
|
||||
|
||||
/* add custom bold class */
|
||||
.bold {font-weight: bold;}
|
||||
|
||||
/* Change placeholder text color of file upload fields */
|
||||
::placeholder {
|
||||
color: #9e9e9e;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
/* preloader circle in the size of a button icon */
|
||||
.button-icon-spinner {
|
||||
bottom: -5px !important;
|
||||
right: 55px !important;
|
||||
margin-right: 12px !important;
|
||||
width: 19.5px !important;
|
||||
height: 19.5px !important;
|
||||
}
|
||||
|
||||
/*
|
||||
* Changes preloader size etc. to fit visually better with the chip status
|
||||
* indicator of jobs
|
||||
*/
|
||||
.status-spinner {
|
||||
margin-bottom: -10px;
|
||||
width: 30px !important;
|
||||
height: 30px !important;
|
||||
}
|
||||
|
||||
/* flat-interaction addition to show background color */
|
||||
|
||||
.flat-interaction {
|
||||
background-color: #DCDCDC;
|
||||
width: 100%;
|
||||
margin-bottom: 3px;
|
||||
text-transform: capitalize;
|
||||
}
|
||||
|
||||
.flat-interaction:hover {
|
||||
background-color: #E6E6FA !important;
|
||||
}
|
||||
|
||||
/* CSS for clickable th elements in tables. Needed for sortable table data with
List.js. On click on a th element the column will be sorted accordingly. A caret
indicator also shows how the column is currently sorted. */
|
||||
.sort {
|
||||
cursor: pointer;
|
||||
}
|
||||
.sort:after {
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-left: 5px solid transparent;
|
||||
border-right: 5px solid transparent;
|
||||
border-bottom: 5px solid transparent;
|
||||
content:"";
|
||||
position: relative;
|
||||
top:-10px;
|
||||
right:-5px;
|
||||
}
|
||||
.sort.asc:after {
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-left: 5px solid transparent;
|
||||
border-right: 5px solid transparent;
|
||||
border-top: 5px solid #000000;
|
||||
content:"";
|
||||
position: relative;
|
||||
top:13px;
|
||||
right:-5px;
|
||||
}
|
||||
.sort.desc:after {
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-left: 5px solid transparent;
|
||||
border-right: 5px solid transparent;
|
||||
border-bottom: 5px solid #000000;
|
||||
content:"";
|
||||
position: relative;
|
||||
top:-10px;
|
||||
right:-5px;
|
||||
}
|
||||
|
||||
.show-if-only-child:not(:only-child) {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/* class for expert view */
|
||||
.expert-view {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.btn-scale-x2 {
|
||||
transform: scale(2);
|
||||
}
|
||||
|
||||
.btn-scale-x2 .nopaque-icons.service-icon {
|
||||
font-size: 2.5rem;
|
||||
}
|
||||
|
||||
.nopaque-icons.service-icon[data-service="corpus-analysis"]:empty:before {content: "H";}
|
||||
.nopaque-icons.service-icon[data-service="file-setup"]:empty:before {content: "E";}
|
||||
.nopaque-icons.service-icon[data-service="nlp"]:empty:before {content: "G";}
|
||||
.nopaque-icons.service-icon[data-service="ocr"]:empty:before {content: "F";}
|
||||
|
||||
.status-text[data-status]:empty:before {content: attr(data-status);}
|
33
app/static/css/nopaque_icons.css
Normal file
@ -0,0 +1,33 @@
|
||||
@font-face {
|
||||
font-family: 'nopaque Icons';
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
src: local('nopaque Icons'),
|
||||
local('nopaqueIcons-Regular'),
|
||||
url(../fonts/nopaque_icons/nopaqueIcons-Regular.otf) format('opentype');
|
||||
}
|
||||
|
||||
.nopaque-icons {
|
||||
font-family: 'nopaque Icons';
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
font-size: 24px; /* Preferred icon size */
|
||||
display: inline-block;
|
||||
line-height: 1;
|
||||
text-transform: none;
|
||||
letter-spacing: normal;
|
||||
word-wrap: normal;
|
||||
white-space: nowrap;
|
||||
direction: ltr;
|
||||
|
||||
/* Support for all WebKit browsers. */
|
||||
-webkit-font-smoothing: antialiased;
|
||||
/* Support for Safari and Chrome. */
|
||||
text-rendering: optimizeLegibility;
|
||||
|
||||
/* Support for Firefox. */
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
|
||||
/* Support for IE. */
|
||||
font-feature-settings: 'liga';
|
||||
}
|
12
app/static/css/sidenav_fixed.css
Normal file
@ -0,0 +1,12 @@
|
||||
/*
|
||||
* The sidenav-fixed class is used, which causes the sidenav to be fixed and open
* on large screens and to fall back to the regular hidden behavior on smaller screens.
* In order to prevent the sidenav from overlapping the content, the content (in our
|
||||
* case header, main and footer) gets an offset equal to the width of the
|
||||
* sidenav.
|
||||
*/
|
||||
@media only screen and (min-width : 993px) {
|
||||
header, main, footer {padding-left: 300px;}
|
||||
.modal:not(.bottom-sheet) {left: 300px;}
|
||||
.navbar-fixed > nav {width: calc(100% - 300px)}
|
||||
}
|
18
app/static/css/sticky_footer.css
Normal file
@ -0,0 +1,18 @@
|
||||
/*
|
||||
* Sticky Footer: https://materializecss.com/footer.html#sticky-footer
|
||||
* A sticky footer always stays on the bottom of the page regardless of how
|
||||
* little content is on the page. However, this footer will be pushed down if
|
||||
* there is a lot of content, so it is different from a fixed footer.
|
||||
*
|
||||
* Note: This may cause issues in Internet Explorer which has weak support for
|
||||
* flexbox.
|
||||
*/
|
||||
body {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
main {
|
||||
flex: 1 0 auto;
|
||||
}
|
BIN
app/static/fonts/material_icons/MaterialIcons-Regular.ttf
Normal file
BIN
app/static/fonts/material_icons/MaterialIconsRound-Regular.otf
Normal file
BIN
app/static/fonts/material_icons/MaterialIconsSharp-Regular.otf
Normal file
BIN
app/static/fonts/material_icons/MaterialIconsTwoTone-Regular.otf
Normal file
BIN
app/static/fonts/nopaque_icons/nopaqueIcons-Regular.otf
Normal file
BIN
app/static/fonts/nopaque_icons/nopaqueIcons-Regular.woff
Normal file
BIN
app/static/fonts/nopaque_icons/nopaqueIcons-Regular.woff2
Normal file
BIN
app/static/images/logo_-_dfg.gif
Normal file
After Width: | Height: | Size: 14 KiB |
BIN
app/static/images/logo_-_sfb_1288.png
Normal file
After Width: | Height: | Size: 16 KiB |
BIN
app/static/images/nopaque_-_favicon.png
Normal file
After Width: | Height: | Size: 64 KiB |
1
app/static/images/nopaque_-_logo.svg
Normal file
@ -0,0 +1 @@
|
||||
<svg id="Ebene_1" data-name="Ebene 1" xmlns="http://www.w3.org/2000/svg" width="66mm" height="64mm" viewBox="0 0 187.09 181.42"><rect x="56.69" y="56.69" width="70.87" height="59.68" style="fill:none"/><g id="Bildmarke_RZ" data-name="Bildmarke RZ"><path d="M119.59,87.15S108.84,98.28,92.13,98.28,64.66,87.15,64.66,87.15,75.44,76,92.13,76,119.59,87.15,119.59,87.15Z" style="fill:#fff;opacity:0.46"/><polygon points="83.77 106.92 102.88 87.15 83.77 67.38 64.66 87.15 83.77 106.92" style="fill:#fff;opacity:0.35000000000000003"/><polygon points="100.48 106.92 119.59 87.15 100.48 67.38 81.37 87.15 100.48 106.92" style="fill:#fff;opacity:0.35000000000000003"/><polygon points="81.37 87.15 92.13 98.28 102.88 87.15 92.13 76.02 81.37 87.15" style="fill:#fff;opacity:0.2"/></g></svg>
|
After Width: | Height: | Size: 777 B |
108
app/static/images/nopaque_-_logo_name_slogan.svg
Normal file
@ -0,0 +1,108 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 24.2.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Ebene_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
width="792.1px" height="181.4px" viewBox="0 0 792.1 181.4" style="enable-background:new 0 0 792.1 181.4;" xml:space="preserve"
|
||||
>
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
.st1{fill:none;}
|
||||
.st2{opacity:0.46;fill:#FFFFFF;enable-background:new ;}
|
||||
.st3{opacity:0.35;fill:#FFFFFF;enable-background:new ;}
|
||||
.st4{opacity:0.2;fill:#FFFFFF;enable-background:new ;}
|
||||
</style>
|
||||
<g id="Ebene_2_1_">
|
||||
<path class="st0" d="M403.7,82.6c-1.8,0-1.8,0.2-1.8,2.1v1.2h3.4v1.5h-3.4v8.5h-2.1v-8.5h-1.9v-1.5h1.9v-1.2c0-2.3,0.5-3.6,3.1-3.6
|
||||
c1,0,2,0.1,3,0.4v1.2C405.1,82.7,404.4,82.6,403.7,82.6z"/>
|
||||
<path class="st0" d="M411.9,87.1c-0.7,0-1.3,0-2,0.1v8.6h-2.1v-9.8c2.1-0.6,4.3-0.6,6.4,0v1.2C413.5,87.2,412.7,87.1,411.9,87.1z"
|
||||
/>
|
||||
<path class="st0" d="M420,96.1c-4.1,0-4.5-0.7-4.5-5.1c0-4.6,0.6-5.3,4.5-5.3c3.9,0,4.5,0.7,4.5,5.3
|
||||
C424.4,95.5,424.1,96.1,420,96.1z M422.3,90.9c0-3.8-0.2-3.8-2.2-3.8h-0.1c-2.1,0-2.3,0-2.3,3.8v0.2c0,3.6,0.1,3.6,2.3,3.6h0.1
|
||||
c2.2,0,2.2,0,2.2-3.6V90.9z"/>
|
||||
<path class="st0" d="M439.6,95.9v-6.7c0-2.1-0.7-2.1-2-2.1c-0.8,0-1.5,0.1-2.3,0.2c0.3,0.6,0.4,1.2,0.3,1.9v6.7h-2.1v-6.7
|
||||
c0-2.1-0.7-2.1-2-2.1c-0.7,0-1.3,0-2,0.1v8.6h-2.1v-9.8c1.3-0.3,2.7-0.4,4.1-0.4c0.9,0,1.8,0.1,2.6,0.4c1.1-0.3,2.3-0.4,3.4-0.4
|
||||
c2.6,0,4.1,0.2,4.1,3.6v6.7H439.6z"/>
|
||||
<path class="st0" d="M455,96.1c-2.6,0-3.1-1.2-3.1-3.6v-5.2H450v-1.5h1.9v-3.4h2.1v3.4h3.4v1.5H454v5.2c0,1.8,0,2.1,1.8,2.1
|
||||
c0.7,0,1.5-0.1,2.2-0.2v1.2C457.1,96,456,96.2,455,96.1z"/>
|
||||
<path class="st0" d="M462.4,91.5c0,3.2,0.1,3.3,2.5,3.3c1.3,0,2.7-0.1,4-0.2v1.2c-1.4,0.3-2.9,0.4-4.3,0.4c-3.9,0-4.3-0.7-4.3-5.1
|
||||
c0-4.6,0.6-5.3,4.5-5.3s4.5,0.7,4.5,5.3v0.5H462.4z M464.8,87.1c-2.1,0-2.3,0-2.3,3.1h4.7C467.1,87.1,466.9,87.1,464.8,87.1z"/>
|
||||
<path class="st0" d="M478.7,95.9l-2.5-4.3l-2.5,4.3h-2.2l3-5.1l-2.8-4.9h2.3l2.4,4.1l2.4-4.1h2.2l-2.8,4.9l3,5.1H478.7z"/>
|
||||
<path class="st0" d="M487.6,96.1c-2.6,0-3.1-1.2-3.1-3.6v-5.2h-1.9v-1.5h1.9v-3.4h2.1v3.4h3.4v1.5h-3.4v5.2c0,1.8,0,2.1,1.8,2.1
|
||||
c0.7,0,1.5-0.1,2.2-0.2v1.2C489.6,96,488.6,96.2,487.6,96.1z"/>
|
||||
<path class="st0" d="M498.8,95.3v-1.6l7.3-3.7l-7.3-3.7v-1.5l9.1,4.6v1.5L498.8,95.3z"/>
|
||||
<path class="st0" d="M520.9,96.1c-2.6,0-3.1-1.2-3.1-3.6v-5.2h-1.9v-1.5h1.9v-3.4h2.1v3.4h3.4v1.5h-3.4v5.2c0,1.8,0,2.1,1.8,2.1
|
||||
c0.7,0,1.5-0.1,2.2-0.2v1.2C522.9,96,521.9,96.2,520.9,96.1z"/>
|
||||
<path class="st0" d="M530.6,96.1c-4.1,0-4.5-0.7-4.5-5.1c0-4.6,0.6-5.3,4.5-5.3c3.9,0,4.5,0.7,4.5,5.3
|
||||
C535.1,95.5,534.8,96.1,530.6,96.1z M533,90.9c0-3.8-0.2-3.8-2.2-3.8h-0.1c-2.1,0-2.3,0-2.3,3.8v0.2c0,3.6,0.1,3.6,2.3,3.6h0.1
|
||||
c2.2,0,2.2,0,2.2-3.6V90.9z"/>
|
||||
<path class="st0" d="M548.2,96.1c-4,0-4.4-0.7-4.4-5.1c0-4.6,0.6-5.3,4.4-5.3c0.8,0,1.5,0.1,2.3,0.2v-4.5h2.1v14.3
|
||||
C551.1,96,549.6,96.2,548.2,96.1z M550.5,87.2c-0.7-0.1-1.5-0.1-2.2-0.1c-2.2,0-2.3,0-2.3,3.8v0.2c0,3.6,0.1,3.6,2.3,3.6
|
||||
c0.7,0,1.5,0,2.2-0.2V87.2z"/>
|
||||
<path class="st0" d="M560,96.1c-2.3,0-4.4,0-4.4-3.2c0-3,1.5-3,3.6-3h3v-0.7c0-2.1-0.6-2.2-2.4-2.2c-1.3,0-2.5,0.1-3.8,0.2v-1.2
|
||||
c1.4-0.3,2.7-0.4,4.1-0.4c2.6,0,4.1,0.2,4.1,3.6v6.5C562.9,96,561.5,96.2,560,96.1z M562.3,91.2h-2.6c-1.5,0-2,0-2,1.7
|
||||
s0.7,1.8,2.3,1.8c0.7,0,1.5,0,2.2-0.1L562.3,91.2z"/>
|
||||
<path class="st0" d="M571.7,96.1c-2.6,0-3.1-1.2-3.1-3.6v-5.2h-1.9v-1.5h1.9v-3.4h2.1v3.4h3.4v1.5h-3.4v5.2c0,1.8,0,2.1,1.8,2.1
|
||||
c0.7,0,1.5-0.1,2.2-0.2v1.2C573.8,96,572.7,96.2,571.7,96.1z"/>
|
||||
<path class="st0" d="M581.2,96.1c-2.3,0-4.4,0-4.4-3.2c0-3,1.5-3,3.6-3h3v-0.7c0-2.1-0.6-2.2-2.4-2.2c-1.3,0-2.5,0.1-3.8,0.2v-1.2
|
||||
c1.4-0.3,2.7-0.4,4.1-0.4c2.6,0,4.1,0.2,4.1,3.6v6.5C584.1,96,582.6,96.2,581.2,96.1z M583.4,91.2h-2.6c-1.5,0-2,0-2,1.7
|
||||
s0.7,1.8,2.3,1.8c0.7,0,1.5,0,2.2-0.1L583.4,91.2z"/>
|
||||
<path class="st0" d="M594.4,95.3v-1.6l7.3-3.7l-7.3-3.7v-1.5l9.1,4.6v1.5L594.4,95.3z"/>
|
||||
<path class="st0" d="M616.5,96.1c-2.6,0-3.1-1.2-3.1-3.6v-5.2h-1.9v-1.5h1.9v-3.4h2.1v3.4h3.4v1.5h-3.4v5.2c0,1.8,0,2.1,1.8,2.1
|
||||
c0.7,0,1.5-0.1,2.2-0.2v1.2C618.5,96,617.5,96.2,616.5,96.1z"/>
|
||||
<path class="st0" d="M626.2,96.1c-4.1,0-4.5-0.7-4.5-5.1c0-4.6,0.6-5.3,4.5-5.3c3.9,0,4.5,0.7,4.5,5.3
|
||||
C630.7,95.5,630.4,96.1,626.2,96.1z M628.6,90.9c0-3.8-0.2-3.8-2.2-3.8h-0.1c-2.1,0-2.3,0-2.3,3.8v0.2c0,3.6,0.1,3.6,2.3,3.6h0.1
|
||||
c2.2,0,2.2,0,2.2-3.6V90.9z"/>
|
||||
<path class="st0" d="M643.8,96.1c-2.3,0-4.4,0-4.4-3.2c0-3,1.5-3,3.6-3h3v-0.7c0-2.1-0.6-2.2-2.4-2.2c-1.3,0-2.5,0.1-3.8,0.2v-1.2
|
||||
c1.4-0.3,2.7-0.4,4.1-0.4c2.6,0,4.1,0.2,4.1,3.6v6.5C646.7,96,645.3,96.2,643.8,96.1z M646.1,91.2h-2.6c-1.5,0-2,0-2,1.7
|
||||
s0.7,1.8,2.3,1.8c0.7,0,1.5,0,2.2-0.1L646.1,91.2z"/>
|
||||
<path class="st0" d="M657.9,95.9v-6.7c0-2.1-0.6-2.1-2.2-2.1c-0.7,0-1.5,0-2.2,0.1v8.6h-2.1v-9.8c1.4-0.3,2.8-0.4,4.3-0.4
|
||||
c2.8,0,4.3,0.2,4.3,3.6v6.7H657.9z"/>
|
||||
<path class="st0" d="M667.1,96.1c-2.3,0-4.4,0-4.4-3.2c0-3,1.5-3,3.6-3h3v-0.7c0-2.1-0.6-2.2-2.4-2.2c-1.3,0-2.5,0.1-3.8,0.2v-1.2
|
||||
c1.4-0.3,2.7-0.4,4.1-0.4c2.6,0,4.1,0.2,4.1,3.6v6.5C670,96,668.6,96.2,667.1,96.1z M669.4,91.2h-2.6c-1.5,0-2,0-2,1.7
|
||||
s0.7,1.8,2.3,1.8c0.7,0,1.5,0,2.2-0.1L669.4,91.2z"/>
|
||||
<path class="st0" d="M674.9,95.9V81.4h2.1v14.5H674.9z"/>
|
||||
<path class="st0" d="M685.3,97.4c-0.9,2.7-1.4,3.3-3,3.3c-1,0-2.1-0.1-3.1-0.4V99c0.8,0.1,1.5,0.2,2.3,0.2c1.1,0,1.2-0.1,1.8-2.1
|
||||
l0.4-1.2h-1.2l-3.2-10h2.1l2.7,8.5l2.6-8.5h2.2L685.3,97.4z"/>
|
||||
<path class="st0" d="M695,96.1c-1.4,0-2.8-0.1-4.1-0.4v-1.2c1.1,0.1,2.2,0.2,3.3,0.2c2.2,0,2.7,0,2.7-1.6c0-1.2,0-1.4-1.1-1.6
|
||||
l-2.4-0.3c-2.5-0.4-2.5-1.4-2.5-2.9c0-2.5,1.8-2.8,3.8-2.8c1.3,0,2.6,0.1,3.8,0.4v1.2c-1-0.1-2-0.2-3-0.2c-2,0-2.5,0-2.5,1.4
|
||||
c0,1.1,0,1.2,1.1,1.4l2.4,0.4c2.5,0.4,2.5,1.3,2.5,3C699,95.9,697.8,96.1,695,96.1z"/>
|
||||
<path class="st0" d="M701.9,84v-2.6h2.1V84H701.9z M701.9,95.9v-10h2.1v10H701.9z"/>
|
||||
<path class="st0" d="M711,96.1c-1.4,0-2.8-0.1-4.1-0.4v-1.2c1.1,0.1,2.2,0.2,3.3,0.2c2.2,0,2.7,0,2.7-1.6c0-1.2,0-1.4-1.1-1.6
|
||||
l-2.4-0.3c-2.5-0.4-2.5-1.4-2.5-2.9c0-2.5,1.8-2.8,3.8-2.8c1.3,0,2.6,0.1,3.8,0.4v1.2c-1-0.1-2-0.2-3-0.2c-2,0-2.5,0-2.5,1.4
|
||||
c0,1.1,0,1.2,1.1,1.4l2.4,0.4c2.5,0.4,2.5,1.3,2.5,3C715,95.9,713.8,96.1,711,96.1z"/>
|
||||
<path class="st0" d="M362.6,101.8V76.1h2.1v25.7H362.6z"/>
|
||||
</g>
|
||||
<rect x="58.6" y="56.7" class="st1" width="283.5" height="59.7"/>
|
||||
<g id="LogotypesRZ">
|
||||
<path class="st0" d="M161.5,82.8c0-5.4-3.3-8.3-9.5-8.3c-4,0.1-7.9,0.9-11.6,2.3c-0.5,0.2-0.9,0.7-0.9,1.3v19.3h4
|
||||
c0.7,0,1.2-0.6,1.3-1.2V79.5l0.3-0.1c2.1-0.6,4.2-0.9,6.4-0.8c4.1,0,4.8,1.9,4.8,5.8v13h4c0.7,0,1.2-0.6,1.2-1.2L161.5,82.8z"/>
|
||||
<path class="st0" d="M178.9,74.5c-7.3,0-12.6,5-12.6,11.8c0,7,5.1,11.7,12.6,11.7c7.3,0,12.5-4.9,12.5-11.7
|
||||
C191.4,79.2,186.4,74.5,178.9,74.5z M178.9,94c-4.3,0-7.2-3.1-7.2-7.7c0-4.5,2.9-7.7,7.2-7.7c4.3,0,7.2,3.2,7.2,7.7
|
||||
C186.1,90.9,183.1,94,178.9,94L178.9,94z"/>
|
||||
<path class="st0" d="M208.1,74.5c-3.7,0.1-7.3,0.8-10.8,2.1c-0.5,0.2-0.9,0.7-0.9,1.2V108h0.2l3.9-0.2c0.7,0,1.2-0.6,1.2-1.2
|
||||
c0,0,0,0,0,0v-9.3l0.4,0.1c1.8,0.4,3.5,0.6,5.3,0.6c8.2,0,11.9-6.1,11.9-12.1C219.4,80.3,216.4,74.5,208.1,74.5z M207.1,93.9
|
||||
c-1.7,0-3.4-0.3-5.1-0.7l-0.3-0.1V79.6l0.3-0.1c1.8-0.5,3.6-0.8,5.4-0.9c4.4,0,6.7,2.4,6.7,7.2C214.1,91.1,211.6,93.9,207.1,93.9
|
||||
L207.1,93.9z"/>
|
||||
<path class="st0" d="M234.5,74.5c-2.6,0-5.1,0.3-7.7,0.9c-0.6,0.1-1.1,0.6-1,1.2c0,0,0,0.1,0,0.1l0.4,2.5v0.2h0.2
|
||||
c2.3-0.6,4.6-0.8,7-0.8c3.5,0,6.2,0.3,6.2,4.7v1.1h-0.4c-1.9-0.3-3.8-0.4-5.7-0.5c-4.8,0-10.4,1.2-10.4,7.1c0,4.8,3.6,7.1,10.9,7.1
|
||||
c1.9,0,3.8-0.2,5.7-0.5c1.4-0.2,2.9-0.6,4.3-1c0.5-0.2,0.9-0.7,0.9-1.2l0,0v-12C244.9,77.1,241.8,74.5,234.5,74.5z M239.7,93.8
|
||||
h-0.3c-1.7,0.3-3.4,0.4-5.1,0.4c-5.2,0-5.9-1.8-5.9-3.4c0-2.4,1.9-3.5,5.7-3.5c1.8,0,3.5,0.2,5.2,0.4l0.3,0.1V93.8z"/>
|
||||
<path class="st0" d="M272.8,77.9v-0.3c0-0.6-0.3-1.1-0.9-1.3l-0.5-0.2c-3-1-6.1-1.6-9.2-1.7c-7.8,0-12.4,4.4-12.4,11.9
|
||||
c0,8.5,5.9,11.6,11.5,11.6c1.9,0,3.9-0.3,5.7-0.9l0.5-0.1v11h0.2l3.9-0.2c0.7,0,1.2-0.6,1.2-1.3L272.8,77.9z M267.5,93.1l-0.3,0.1
|
||||
c-1.5,0.4-3.1,0.7-4.7,0.7c-4.9,0-7.4-2.5-7.4-7.5c0-5,2.5-7.8,7.1-7.8c1.7,0,3.3,0.3,4.9,0.7l0.3,0.1L267.5,93.1z"/>
|
||||
<path class="st0" d="M300.4,76.4c0-0.7-0.6-1.3-1.2-1.3c0,0,0,0,0,0h-4v18.6l-0.3,0.1c-1.5,0.2-3,0.4-4.5,0.4
|
||||
c-4.2,0-6.7-1.1-6.7-6.6c0-2.8,0.2-6,0.4-9v-0.2c0.1-1,0.1-1.3,0.1-1.9c0.1-0.7-0.3-1.3-1-1.4c-0.1,0-0.2,0-0.3,0H279v0.2
|
||||
c-0.4,5.4-0.7,9.4-0.7,14c0,6,3.6,8.7,11.4,8.7c3.2-0.1,6.4-0.5,9.6-1.2c0.6-0.1,1-0.6,1-1.2l0,0L300.4,76.4L300.4,76.4z"/>
|
||||
<path class="st0" d="M328.7,85c0-6.3-4.5-10.5-11.2-10.5c-7.3,0-12.2,4.8-12.2,11.9c0,7.1,5.2,11.6,12.9,11.6
|
||||
c2.7,0,5.3-0.5,7.9-1.3l0,0c0.5-0.2,0.9-0.7,0.8-1.2v-0.2l-0.4-2.2v-0.2h-0.2c-2.4,0.7-5,1-7.5,1c-4.6,0-7.3-2-8-5.8l-0.1-0.4h16.6
|
||||
c0.6-0.1,1.1-0.5,1.2-1.1v-0.1C328.7,86,328.7,85.5,328.7,85z M310.7,84.2l0.1-0.4c0.4-3.2,3.2-5.6,6.4-5.3c3.8,0,6.1,2,6.2,5.4
|
||||
v0.4H310.7z"/>
|
||||
</g>
|
||||
<g id="Bildmarke_RZ">
|
||||
<path class="st2" d="M127.7,86.5c-7.8,7-17.9,11-28.4,11.1c-10.5-0.1-20.6-4.1-28.4-11.1c7.8-7,17.9-11,28.4-11.1
|
||||
C109.8,75.5,119.9,79.5,127.7,86.5z"/>
|
||||
<rect x="76.7" y="72.5" transform="matrix(0.7071 -0.7071 0.7071 0.7071 -34.6157 89.4271)" class="st3" width="28" height="28"/>
|
||||
<rect x="93.9" y="72.5" transform="matrix(0.7071 -0.7071 0.7071 0.7071 -29.5558 101.6511)" class="st3" width="28" height="28"/>
|
||||
|
||||
<rect x="91.4" y="78.6" transform="matrix(0.7071 -0.7071 0.7071 0.7071 -32.0816 95.5461)" class="st4" width="15.7" height="15.7"/>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 9.1 KiB |
BIN
app/static/images/parallax_hq/book_text_read_paper.jpg
Normal file
After Width: | Height: | Size: 1.9 MiB |
BIN
app/static/images/parallax_hq/books_antique_book_old.jpg
Normal file
After Width: | Height: | Size: 6.2 MiB |
BIN
app/static/images/parallax_hq/concept_document_focus_letter.jpg
Normal file
After Width: | Height: | Size: 2.1 MiB |
BIN
app/static/images/parallax_hq/text_data_wide.png
Normal file
After Width: | Height: | Size: 282 KiB |
BIN
app/static/images/parallax_lq/01_books_antique_book_old.jpg
Normal file
After Width: | Height: | Size: 290 KiB |
After Width: | Height: | Size: 100 KiB |
BIN
app/static/images/parallax_lq/03_text_data_wide.png
Normal file
After Width: | Height: | Size: 282 KiB |
BIN
app/static/images/parallax_lq/04_german_text_book_paper.jpg
Normal file
After Width: | Height: | Size: 202 KiB |
BIN
app/static/images/parallax_lq/05_chapter_book_text_tale.jpg
Normal file
After Width: | Height: | Size: 117 KiB |
BIN
app/static/images/parallax_lq/bible_text.jpg
Normal file
After Width: | Height: | Size: 188 KiB |
BIN
app/static/images/parallax_lq/text_data.png
Normal file
After Width: | Height: | Size: 252 KiB |
1
app/static/images/qr_-_inf.svg
Normal file
After Width: | Height: | Size: 177 KiB |
BIN
app/static/images/server_activity.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
app/static/images/sfb_background.jpeg
Normal file
After Width: | Height: | Size: 4.2 KiB |
BIN
app/static/images/workflow.png
Normal file
After Width: | Height: | Size: 1.3 MiB |
3894
app/static/js/darkreader.js
Normal file
36
app/static/js/jsonpatch.min.js
vendored
Normal file
2
app/static/js/list.min.js
vendored
Normal file
1
app/static/js/list.min.js.map
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"list.min.js","sources":["webpack://List/list.min.js"],"mappings":"AAAA","sourceRoot":""}
|
6
app/static/js/materialize.min.js
vendored
Normal file
282
app/static/js/modules/corpus_analysis/client/Client.js
Normal file
@ -0,0 +1,282 @@
|
||||
/**
|
||||
* This class is used to create a Client object.
* The client handles the client-server communication.
* It communicates with the server (e.g. connection or query)
* and receives data from it if dynamicMode is true.
* If dynamicMode is false, the client can also handle data that is already
* loaded and is not coming in chunks.
|
||||
*/
|
||||
class Client {
|
||||
constructor({corpusId = null,
|
||||
socket = null,
|
||||
logging = true,
|
||||
dynamicMode = true,
|
||||
fullContext = null} = {}) {
|
||||
this.corpusId = corpusId;
|
||||
this.dynamicMode = dynamicMode;
|
||||
this.logging = logging;
|
||||
this.requestQueryProgress = 0;
|
||||
this.socket = socket;
|
||||
this.eventListeners = {};
|
||||
this.isBusy = false;
|
||||
this.fullContext = fullContext;
|
||||
/**
|
||||
* Disables all console logging.
|
||||
* This is global. So every other log message in every other Class or
|
||||
* function used in conjunction with the client either logs or does not
|
||||
* log depending on the logging flag. It is kind of hacky but not bad.
|
||||
* Credits to https://gist.github.com/kmonsoor/0244fdb4ad79a4826371e58a1a5fa984
|
||||
*/
|
||||
if (!logging) {
|
||||
(() => {
|
||||
let console = (window.console = window.console || {});
|
||||
[
|
||||
'assert', 'clear', 'count', 'debug', 'dir', 'dirxml',
|
||||
'error', 'exception', 'group', 'groupCollapsed', 'groupEnd',
|
||||
'info', 'log', 'markTimeline', 'profile', 'profileEnd', 'table',
|
||||
'time', 'timeEnd', 'timeStamp', 'trace', 'warn'
|
||||
].forEach((method) => {
|
||||
console[method] = () => {};
|
||||
});
|
||||
})();
|
||||
}
|
||||
console.info("Client initialized:", this);
|
||||
}
|
||||
/**
|
||||
* Registers one or more event listeners to the Client. Either socket or
|
||||
* custom javascript events. Event listeners are class instances of
|
||||
* ClientEventListener implemented further below.
|
||||
*/
|
||||
setSocketEventListeners(eventListeners) {
|
||||
for (let eventListener of eventListeners) {
|
||||
this.eventListeners[eventListener.type] = eventListener;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the event listeners that have been registered with the function
|
||||
* above so that they will be triggered on their assigned
|
||||
* type strings. Type strings double as the socket event names or
|
||||
* javascript custom event names.
|
||||
*/
|
||||
loadSocketEventListeners() {
|
||||
for (let [type, listener] of Object.entries(this.eventListeners)) {
|
||||
listener.listenerFunction(type, this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function emits the 'notify-view' custom javascript event. This
|
||||
* triggers specific functions in the View depending on the caseIdentifier.
|
||||
* The detail object can hold any type of data the View needs to know about
|
||||
* to represent those to the user.
|
||||
*/
|
||||
notifyView(caseIdentifier, detailObject={}, notificationType='info',
|
||||
raiseModalFeedback=true) {
|
||||
detailObject.caseIdentifier = caseIdentifier;
|
||||
detailObject.client = this;
|
||||
detailObject.notificationType = notificationType;
|
||||
detailObject.raiseModalFeedback = raiseModalFeedback;
|
||||
const event = new CustomEvent('notify-view', { detail: detailObject });
|
||||
console[notificationType]('Client dispatching Notification with details:',
|
||||
detailObject);
|
||||
document.dispatchEvent(event);
|
||||
}
|
||||
|
||||
/**
|
||||
* Connects to the corpus analysis session running on the server side for the
|
||||
* specified corpus via socket.io.
|
||||
*/
|
||||
connect() {
|
||||
console.info('corpus_analysis_init: Client connecting to session via',
|
||||
'socket.emit');
|
||||
this.socket.emit('corpus_analysis_init', this.corpusId);
|
||||
}
|
||||
|
||||
// Gets the meta data of the current corpus.
|
||||
getMetaData() {
|
||||
this.isBusy = true;
|
||||
console.info('corpus_analysis_meta_data: Client getting meta data via',
|
||||
'socket.emit.');
|
||||
this.socket.emit('corpus_analysis_meta_data', this.corpusId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emits query to the server via socket.io. Server will send the results
|
||||
* back.
|
||||
*/
|
||||
query(queryStr) {
|
||||
this.isBusy = true;
|
||||
console.info('corpus_analysis_query: Client sending query via',
|
||||
'socket.emit for the query', queryStr);
|
||||
this.socket.emit('corpus_analysis_query', queryStr);
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests results data for either 'full-results', 'sub-results' or
* 'inspect-results' (resultsType).
* Gets full results for every provided dataIndex (one match).
|
||||
* Full results means with full context. So the Client has to request all
|
||||
* matches from the server again!
|
||||
**/
|
||||
getResultsData(resultsType, dataIndexes, results) {
|
||||
let tmp_first_cpos = [];
|
||||
let tmp_last_cpos = [];
|
||||
let objectKey = '';
|
||||
if (resultsType === 'full-results') {
|
||||
objectKey = 'fullResultsData';
|
||||
} else if (resultsType === 'sub-results') {
|
||||
objectKey = 'subResultsData';
|
||||
} else if (resultsType === 'inspect-results') {
|
||||
objectKey = 'inspectResultsData';
|
||||
}
|
||||
// Delete old data before new data is coming in.
|
||||
results[objectKey].init();
|
||||
for (let dataIndex of dataIndexes) {
|
||||
tmp_first_cpos.push(results.data.matches[dataIndex].c[0]);
|
||||
tmp_last_cpos.push(results.data.matches[dataIndex].c[1]);
|
||||
}
|
||||
this.socket.emit('corpus_analysis_get_match_with_full_context',
|
||||
{type: resultsType,
|
||||
data_indexes: dataIndexes,
|
||||
first_cpos: tmp_first_cpos,
|
||||
last_cpos: tmp_last_cpos,});
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets results data for either 'full-results' or 'sub-results'.
|
||||
* Gets results for every provided dataIndex (one match) without full
|
||||
* context. Because no context is needed the results data is gathered locally
|
||||
* from results.data and not from the server.
|
||||
**/
|
||||
getResultsDataWithoutContext(resultsType, dataIndexes, results, resultsList) {
|
||||
this.notifyView('results-data-recieving', {fullContext: false});
|
||||
let objectKey = '';
|
||||
if (resultsType === 'full-results') {
|
||||
console.info('Saving full-results data without full context.');
|
||||
objectKey = 'fullResultsData';
|
||||
} else if (resultsType === 'sub-results') {
|
||||
console.info('Saving sub-results data without full context.');
|
||||
objectKey = 'subResultsData';
|
||||
}
|
||||
// Get matches from results.data.
|
||||
let matches = [];
|
||||
let cpos = [];
|
||||
let match;
|
||||
for (let index of dataIndexes) {
|
||||
match = results.data.matches[index]
|
||||
matches.push(match)
|
||||
// Get cpos from match.
|
||||
let {lc, c, rc} = resultsList.helperCreateCpos(results.data.cpos_ranges,
|
||||
match);
|
||||
cpos.push(...lc);
|
||||
cpos.push(...c);
|
||||
cpos.push(...rc);
|
||||
}
|
||||
// Get cpos_lookups from cposes.
|
||||
let cpos_lookup = {};
|
||||
let textIds = new Set;
|
||||
for (let single_cpos of cpos) {
|
||||
textIds.add(results.data.cpos_lookup[single_cpos].text);
|
||||
Object.assign(cpos_lookup, { [single_cpos]: results.data.cpos_lookup[single_cpos]});
|
||||
}
|
||||
let text = {};
|
||||
let text_lookup = {};
|
||||
for (let id of textIds) {
|
||||
text[id] = results.data.text_lookup[id];
|
||||
Object.assign(text_lookup, text);
|
||||
}
|
||||
/**
|
||||
* Save the data from results.data either in results.fullResultsData or
|
||||
* results.subResultsData.
|
||||
*/
|
||||
results[objectKey].init();
|
||||
results[objectKey].matches.push(...matches);
|
||||
results[objectKey].addData(cpos_lookup, "cpos_lookup");
|
||||
results[objectKey].addData(text_lookup, "text_lookup");
|
||||
results[objectKey].addData(results.metaData);
|
||||
results[objectKey].query = results.data.query;
|
||||
results[objectKey].corpus_type = resultsType;
|
||||
results[objectKey].match_count = matches.length;
|
||||
results[objectKey].cpos_ranges = results.data.cpos_ranges;
|
||||
results[objectKey].fullContext = false;
|
||||
if (objectKey === 'subResultsData') {
|
||||
// Remove match_count from texts, because they are useless in sub results
|
||||
for (let [key, value] of Object.entries(results[objectKey].text_lookup)) {
|
||||
delete results[objectKey].text_lookup[key].match_count;
|
||||
}
|
||||
}
|
||||
console.info('Results data without context has been saved.', results);
|
||||
this.isBusy = false;
|
||||
this.notifyView('results-data-recieved', {type: resultsType,
|
||||
results: results,
|
||||
fullContext: false});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This class is used to create an event listener listening for socket or
|
||||
* javascript custom events.
|
||||
* Inputs are an identifying type string, the listener function and callbacks
|
||||
* which will be executed as part of the listener function. The identifying
|
||||
* type string is also used as the socket event or custom javascript event name
|
||||
* identifier.
|
||||
*/
|
||||
class ClientEventListener {
|
||||
constructor(type, listenerFunction) {
|
||||
this.listenerCallbacks = {};
|
||||
this.listenerFunction = listenerFunction;
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
// Registers callbacks to this ClientEventListener.
|
||||
setCallbacks(listenerCallbacks) {
|
||||
for (let listenerCallback of listenerCallbacks) {
|
||||
this.listenerCallbacks[listenerCallback.type] = listenerCallback;
|
||||
}
|
||||
}
|
||||
|
||||
/** Shorthand to execute all registered callbacks with same defaultArgs
|
||||
* in insertion order.
|
||||
* NOTE:
|
||||
* Since ECMAScript 2015, objects do preserve creation order for
|
||||
* string and Symbol keys. In JavaScript engines that comply with the
|
||||
* ECMAScript 2015 spec, iterating over an object with only string keys will
|
||||
* yield the keys in order of insertion.
|
||||
* So all modern Browsers.
|
||||
*/
|
||||
executeCallbacks(defaultArgs) {
|
||||
for (let [type, listenerCallback] of Object.entries(this.listenerCallbacks)) {
|
||||
listenerCallback.callbackFunction(...defaultArgs,
|
||||
...listenerCallback.args);
|
||||
}
|
||||
}
|
||||
// Executes a specific registered callback by providing a type string.
|
||||
executeCallback(defaultArgs, type) {
|
||||
let listenerCallback = this.listenerCallbacks[type];
|
||||
let args = defaultArgs.concat(listenerCallback.args);
|
||||
listenerCallback.callbackFunction(...args);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This class is used to create a ListenerCallback which will be registered
* to a ClientEventListener so the listener can invoke the associated
|
||||
* callback functions.
|
||||
*/
|
||||
class ListenerCallback {
|
||||
constructor(type, callbackFunction, argsList) {
|
||||
this.args = argsList;
|
||||
this.callbackFunction = callbackFunction;
|
||||
this.type = type;
|
||||
}
|
||||
}
|
||||
|
||||
// Export Classes from this module.
|
||||
export {
|
||||
Client,
|
||||
ClientEventListener,
|
||||
ListenerCallback,
|
||||
};
|
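getResultsData above emits 'corpus_analysis_get_match_with_full_context' with the results type plus three parallel lists (data_indexes, first_cpos, last_cpos). The server-side handler is not part of this hunk; the Python sketch below only spells out the expected message shape on the receiving end and is an assumption, not the actual handler.

from app import socketio


@socketio.on('corpus_analysis_get_match_with_full_context')
def corpus_analysis_get_match_with_full_context(message):
    # Message shape as emitted by Client.getResultsData above.
    results_type = message['type']          # 'full-results', 'sub-results', ...
    data_indexes = message['data_indexes']  # one index per selected match
    first_cpos = message['first_cpos']      # parallel lists of corpus positions
    last_cpos = message['last_cpos']
    # A real handler would query the corpus here and emit every match back in
    # an envelope like {'code': 200, 'msg': 'OK', 'type': results_type,
    # 'payload': {...}}, which is what recieveResultsData in listeners.js expects.
    ...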
161
app/static/js/modules/corpus_analysis/client/callbacks.js
Normal file
@ -0,0 +1,161 @@
|
||||
/**
|
||||
* This callback is called on a socket.on "corpus_analysis_send_meta_data".
|
||||
* Handles incoming corpus metadata.
|
||||
*/
|
||||
function saveMetaData(...args) {
|
||||
let [payload, client, results, rest] = args;
|
||||
client.notifyView('meta-data-recieving');
|
||||
results.metaData.init(payload)
|
||||
console.info('Metadata saved:', results);
|
||||
client.isBusy = false;
|
||||
client.notifyView('meta-data-recieved');
|
||||
}
|
||||
|
||||
/**
|
||||
* This callback should be registered to the SocketEventListener 'recieveQueryStatus'.
|
||||
* It just gets the incoming status data of the issued query
|
||||
* and does some preparation work like hiding or showing elements and deleting
|
||||
* the data from the last query.
|
||||
*/
|
||||
function prepareQueryData(...args) {
|
||||
// deletes old data from query issued before this new query
|
||||
let [payload, client, results, rest] = args;
|
||||
// always initialize the results to delete data from the query issued before
|
||||
results.init();
|
||||
results.data.match_count = payload.match_count;
|
||||
client.requestQueryProgress = 0;
|
||||
client.notifyView('query-data-prepareing', { results: results });
|
||||
}
|
||||
|
||||
/**
|
||||
* This callback saves the incoming query data chunks into the model results.
|
||||
*/
|
||||
function saveQueryData(...args) {
|
||||
let [payload, client, results, rest] = args;
|
||||
// Get data matches length before new chunk data is being inserted
|
||||
let dataLength = results.data.matches.length;
|
||||
if (client.dynamicMode) {
|
||||
// Incorporating new chunk data into full results
|
||||
results.data.matches.push(...payload.chunk.matches);
|
||||
results.data.addData(payload.chunk.cpos_lookup, 'cpos_lookup');
|
||||
results.data.addData(payload.chunk.text_lookup, 'text_lookup');
|
||||
/**
|
||||
* Increment match_counts per text in a global results variable because
|
||||
* they are coming in chunkwise.
|
||||
*/
|
||||
if (payload.chunk.text_lookup) {
|
||||
for (let [text_key, value] of Object.entries(payload.chunk.text_lookup)) {
|
||||
if (!(text_key in results.tmp_match_counts)) {
|
||||
results.tmp_match_counts[text_key] = {match_count: 0};
|
||||
}
|
||||
results.tmp_match_counts[text_key].match_count += payload.chunk.text_lookup[text_key].match_count;
|
||||
}
|
||||
}
|
||||
results.data.cpos_ranges = payload.chunk.cpos_ranges;
|
||||
let queryFormElement = document.querySelector('#query-form');
|
||||
results.data.getQueryStr(queryFormElement);
|
||||
client.requestQueryProgress = payload.progress;
|
||||
client.notifyView('query-data-recieving',
|
||||
{ results: results,
|
||||
client: client,
|
||||
dataLength: dataLength });
|
||||
console.info('Query data chunk saved', results.data);
|
||||
if (client.requestQueryProgress === 100) {
|
||||
client.isBusy = false;
|
||||
// Update text_lookup with tmp_match_counts.
|
||||
for (let [text_key, value] of Object.entries(results.tmp_match_counts)) {
|
||||
results.data.text_lookup[text_key].match_count = results.tmp_match_counts[text_key].match_count;
|
||||
}
|
||||
client.notifyView('query-data-recieved');
|
||||
}
|
||||
} else {
|
||||
results.data.matches.push(...payload.matches);
|
||||
results.data.addData(payload.cpos_lookup, 'cpos_lookup');
|
||||
results.data.addData(payload.text_lookup, 'text_lookup');
|
||||
results.data.cpos_ranges = payload.cpos_ranges;
|
||||
let queryFormElement = document.querySelector('#query-form');
|
||||
results.data.getQueryStr(queryFormElement);
|
||||
client.requestQueryProgress = 100;
|
||||
client.notifyView('query-data-recieving',
|
||||
{ results: results,
|
||||
client: client,
|
||||
dataLength: dataLength });
|
||||
console.info('Query data chunk saved', results.data);
|
||||
if (client.requestQueryProgress === 100) {
|
||||
console.log(results.data);
|
||||
client.notifyView('query-data-recieved');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This callback gets the results data for the export, either requesting it
* with full context from the server or getting it locally without full context
* from the already present results.data. Result data is identified by the
* dataIndexes; one index is one match.
|
||||
*/
|
||||
function getResultsData(...args) {
|
||||
let [resultsType, dataIndexes, resultsList, client, results, rest] = args;
|
||||
client.isBusy = true;
|
||||
if (resultsList.exportFullInspectContext.checked
|
||||
|| resultsType === 'inspect-results') {
|
||||
console.info('Get results with full context');
|
||||
client.getResultsData(resultsType, dataIndexes, results);
|
||||
} else {
|
||||
console.info('Get results without full context');
|
||||
client.getResultsDataWithoutContext(resultsType, dataIndexes, results,
|
||||
resultsList);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles incoming results which have been requested via getResultsData() and
* saves the data accordingly into the results object.
|
||||
*/
|
||||
function saveResultsData(...args) {
|
||||
let [payload, type, client, results, rest] = args;
|
||||
let objectKey = '';
|
||||
if (type === 'full-results') {
|
||||
console.info('Saving full-results data.');
|
||||
objectKey = 'fullResultsData';
|
||||
} else if (type === 'sub-results') {
|
||||
console.info('Saving sub-results data.');
|
||||
objectKey = 'subResultsData';
|
||||
} else if (type === 'inspect-results') {
|
||||
objectKey = 'inspectResultsData';
|
||||
console.info('Saving inspect-results data');
|
||||
}
|
||||
// Save incoming data. Data is incoming one match at a time.
|
||||
results[objectKey].matches.push(...payload.matches);
|
||||
results[objectKey].addData(payload.cpos_lookup, 'cpos_lookup');
|
||||
results[objectKey].addData(payload.text_lookup, 'text_lookup');
|
||||
results[objectKey].addData(results.metaData);
|
||||
results[objectKey].query = results.data.query;
|
||||
results[objectKey].corpus_type = type;
|
||||
results[objectKey].match_count += 1;
|
||||
results[objectKey].cpos_ranges = payload.cpos_ranges;
|
||||
results[objectKey].fullContext = true;
|
||||
console.info('Results data has been saved.', results);
|
||||
// Notify view to update progress bar
|
||||
client.notifyView('results-data-recieving', {type: type,
|
||||
progress: payload.progress})
|
||||
if (payload.progress === 100) {
|
||||
client.isBusy = false;
|
||||
if (objectKey === 'fullResultsData') {
|
||||
// Get match count per text from results.data only for fullResultsData
|
||||
results[objectKey].text_lookup = results.data.text_lookup;
|
||||
}
|
||||
client.notifyView('results-data-recieved', {type: type,
|
||||
results: results,
|
||||
fullContext: true});
|
||||
}
|
||||
}
|
||||
|
||||
// export callbacks
|
||||
export {
|
||||
prepareQueryData,
|
||||
saveMetaData,
|
||||
saveQueryData,
|
||||
getResultsData,
|
||||
saveResultsData,
|
||||
};
|
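saveQueryData above consumes, in dynamic mode, one payload.chunk per event (matches, cpos_lookup, text_lookup, cpos_ranges) together with a payload.progress percentage, and treats progress === 100 as the end of the stream. The emitting side is outside this hunk; the sketch below only spells out that assumed envelope (emit_query_chunk is a hypothetical helper, and the event name is taken from the log messages in listeners.js).

from app import socketio


def emit_query_chunk(room, chunk, progress):
    # Hypothetical helper: push one query result chunk to a client's room.
    response = {
        'code': 200,
        'msg': 'OK',
        'payload': {
            # chunk = {'matches': [...], 'cpos_lookup': {...},
            #          'text_lookup': {...}, 'cpos_ranges': True}
            'chunk': chunk,
            'progress': progress,  # 0-100; 100 marks the final chunk
        },
    }
    socketio.emit('corpus_analysis_query_results', response, room=room)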
201
app/static/js/modules/corpus_analysis/client/listeners.js
Normal file
@ -0,0 +1,201 @@
|
||||
/**
|
||||
* This file contains the listener functions which can be assigned to the
* corpus_analysis client so that the incoming data/status information will
* be handled. There are several listeners listening for socket.io events.
|
||||
* Further below one javascript custom event listener is specified. This
|
||||
* listener listens for javascript custom events which are being dispatched by
|
||||
* the View (resultsList).
|
||||
*/
|
||||
|
||||
// Listeners for socket io events
|
||||
|
||||
/**
|
||||
* Receives a corpus analysis connected signal via socket.io.
|
||||
*/
|
||||
function recieveConnected(type, client) {
|
||||
client.socket.on(type, (response) => {
|
||||
/**
|
||||
* Check if request for session was OK.
|
||||
* If OK execute registered callbacks and notify View.
|
||||
*/
|
||||
if (response.code === 200) {
|
||||
console.group('Connected!')
|
||||
console.info('corpus_analysis_init: Client receiving connected',
|
||||
'codes via socket.on');
|
||||
console.info(`corpus_analysis_init: ${response.code} - ${response.msg}`);
|
||||
console.info('corpus_analysis_init: Initialization succeeded');
|
||||
console.info(response);
|
||||
client.notifyView('connected');
|
||||
console.groupEnd();
|
||||
// get meta data immediately
|
||||
client.getMetaData();
|
||||
} else {
|
||||
let errorText = `Error ${response.code} - ${response.msg}`;
|
||||
console.group('Connection failed!');
|
||||
console.error(`corpus_analysis_init: ${errorText}`);
|
||||
client.notifyView('client-failed', { msg: errorText }, 'error');
|
||||
console.groupEnd();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Receives meta data from the server via socket.io.
|
||||
*/
|
||||
function recieveMetaData(type, client) {
|
||||
client.socket.on(type, (response) => {
|
||||
/**
|
||||
* Check if request for session was OK.
|
||||
* If OK execute registered callbacks and notify View.
|
||||
*/
|
||||
if (response.code === 200) {
|
||||
console.group('Client receiving meta data')
|
||||
console.info('corpus_analysis_meta_data: Client receiving meta data',
|
||||
'via socket.on');
|
||||
console.info(`corpus_analysis_meta_data: ${response.code} - ${response.msg}`);
|
||||
console.info(response);
|
||||
// executing the registered callbacks
|
||||
client.eventListeners[type].executeCallbacks([response.payload]);
|
||||
console.groupEnd();
|
||||
} else {
|
||||
let errorText = `Error ${response.payload.code} - ${response.payload.msg}`;
|
||||
console.group('Failed to receive meta data.');
|
||||
console.error('corpus_analysis_meta_data: Client failed to receive',
|
||||
'meta data via socket.on');
|
||||
console.error(`corpus_analysis_meta_data: ${errorText}`);
|
||||
client.notifyView('client-failed', { msg: errorText }, 'error');
|
||||
console.groupEnd();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Receives the query process status before any actual results are being
* transmitted. So it receives error codes if a query failed or
|
||||
* was invalid etc.
|
||||
* Also prepares the result.jsList for the incoming data.
|
||||
*/
|
||||
function recieveQueryStatus(type, client) {
|
||||
/**
|
||||
* Check if request for session was OK.
|
||||
* If OK execute registered callbacks and notify View.
|
||||
*/
|
||||
client.socket.on(type, (response) => {
|
||||
if (response.code === 200) {
|
||||
console.group('corpus_analysis_query: Client receiving query process',
|
||||
'status via socket.on');
|
||||
console.info(`corpus_analysis_query: ${response.code} - ${response.msg}`);
|
||||
console.info(response);
|
||||
// executing the registered callbacks
|
||||
client.eventListeners[type].executeCallbacks([response.payload]);
|
||||
console.groupEnd();
|
||||
} else {
|
||||
let errorText = `Error ${response.payload.code} - ${response.payload.msg}`;
|
||||
console.group('corpus_analysis_query: Client failed recieving',
|
||||
'query process status via socket.on');
|
||||
if (response.payload.code == 1281) {
|
||||
errorText += ' - Invalid Query';
|
||||
console.error(`corpus_analysis_query: ${errorText}`);
|
||||
client.notifyView('client-failed', { msg: errorText }, 'error', false);
|
||||
} else {
|
||||
console.error(`corpus_analysis_query: ${errorText}`);
|
||||
client.notifyView('client-failed', { msg: errorText }, 'error');
|
||||
}
|
||||
console.groupEnd();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Receives the query data from the request and handles it.
|
||||
*/
|
||||
function recieveQueryData(type, client) {
|
||||
/**
|
||||
* Check if request for session was OK.
|
||||
* If OK execute registered callbacks and notify View.
|
||||
*/
|
||||
client.socket.on(type, (response) => {
|
||||
if (response.code === 200) {
|
||||
console.group('corpus_analysis_query_results: Recieveing query data')
|
||||
console.info('Client recieving query data via socket.on');
|
||||
console.info('Recieved chunk', response);
|
||||
/**
|
||||
* Execute registered callbacks and notify View.
|
||||
*/
|
||||
client.eventListeners[type].executeCallbacks([response.payload]);
|
||||
console.info('Added chunk data to results.data.');
|
||||
console.groupEnd();
|
||||
} else {
|
||||
let errorText = `Error ${response.payload.code} - ${response.payload.msg}`;
|
||||
console.group('corpus_analysis_query_results: Client failed recieving',
|
||||
'the results via socket.on');
|
||||
console.error(`corpus_analysis_query: ${errorText}`);
|
||||
client.notifyView('client-failed', { msg: errorText }, 'error');
|
||||
console.groupEnd();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Receives the data requested by the create results or create sub-results button.
|
||||
*/
|
||||
function recieveResultsData(type, client) {
|
||||
client.socket.on(type, (response) => {
|
||||
/**
|
||||
* Check if request for session was OK.
|
||||
* If OK execute registered callbacks and notify View.
|
||||
*/
|
||||
if (response.code === 200) {
|
||||
console.group('Client recieving results data')
|
||||
console.info('corpus_analysis_get_match_with_full_context: Client recieving results data',
|
||||
'via socket.on');
|
||||
console.info(`corpus_analysis_get_match_with_full_context: ${response.code} - ${response.msg}`);
|
||||
console.info(response);
|
||||
// executing the registered callbacks
|
||||
client.eventListeners[type].executeCallbacks([response.payload,
|
||||
response.type]);
|
||||
console.groupEnd();
|
||||
} else {
|
||||
let errorText = `Error ${response.payload.code} - ${response.payload.msg}`;
|
||||
console.group('Failed to recieve results data.');
|
||||
console.error('corpus_analysis_get_match_with_full_context: Client failed to recieve',
|
||||
'results data via socket.on');
|
||||
console.error(`corpus_analysis_get_match_with_full_context: ${errorText}`);
|
||||
client.notifyView('client-failed', { msg: errorText }, 'error');
|
||||
console.groupEnd();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* This is the javascript custom event listener, listening for events
|
||||
* dispatched by the View.
|
||||
*/
|
||||
function recieveViewNotification(type, client) {
|
||||
document.addEventListener(type, (event) => {
|
||||
let caseIdentifier = event.detail.caseIdentifier;
|
||||
switch(caseIdentifier) {
|
||||
case 'get-results':
|
||||
console.info('Client getting full results for export.');
|
||||
// execute callback or functions
|
||||
client.eventListeners[type].executeCallback([event.detail.resultsType,
|
||||
event.detail.dataIndexes,
|
||||
event.detail.resultsList],
|
||||
caseIdentifier);
|
||||
break
|
||||
default:
|
||||
console.error('Received unknown notification case identifier from View');
|
||||
// do something to not crash the analysis session?
|
||||
}
|
||||
});
|
||||
}
|
||||
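/*
 * Usage sketch (editor's note, assumption): the View dispatches its requests
 * as 'notify-client' custom events (see ResultsList.notifyClient in
 * view/ResultsView.js), so this listener would typically be registered with
 * that event type:
 *
 *   recieveViewNotification('notify-client', client);
 */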
|
||||
// export listeners from this module
|
||||
export {
|
||||
recieveConnected,
|
||||
recieveMetaData,
|
||||
recieveQueryStatus,
|
||||
recieveQueryData,
|
||||
recieveViewNotification,
|
||||
recieveResultsData,
|
||||
};
|
141
app/static/js/modules/corpus_analysis/model/Results.js
Normal file
@ -0,0 +1,141 @@
|
||||
/**
|
||||
* These classes implement the data store of the corpus_analysis
|
||||
* package. If we follow the idea of the Model View Controller Pattern these
|
||||
* classes combined in the Results class define the Model.
|
||||
*/
|
||||
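/*
 * Usage sketch (editor's note, not part of the original file): typical
 * lifecycle of the Model. The query form selector is an assumption.
 *
 *   const results = new Results();
 *   results.init();  // reset/empty all data stores
 *   results.data.getQueryStr(document.querySelector('#query-form'));
 *   results.data.addData(chunk);  // merge a received result chunk
 */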
|
||||
// Results class bundling the different data objects.
|
||||
class Results {
|
||||
constructor() {
|
||||
this.data = new Data();
|
||||
this.metaData = new MetaData();
|
||||
this.fullResultsData = new Data();
|
||||
this.subResultsData = new Data();
|
||||
this.inspectResultsData = new Data();
|
||||
console.info('Initialized the Results object.');
|
||||
}
|
||||
|
||||
// Reset all the data objects in the Results class, emptying them.
|
||||
init() {
|
||||
this.data.init();
|
||||
this.metaData.init();
|
||||
this.fullResultsData.init();
|
||||
this.subResultsData.init();
|
||||
this.inspectResultsData.init();
|
||||
// Temporarily save match counts per text.
|
||||
this.tmp_match_counts = {};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Class that defines the actual data objects holding the results data
|
||||
* requested by the client. Data can be the results of a query, full results
|
||||
* data for the export, sub results data for the export or inspect results data.
|
||||
* All kinds are structured the same way.
|
||||
*/
|
||||
class Data {
|
||||
/**
|
||||
* Sets an empty object structure. Also useful to discard old results.
|
||||
* MatchCount default is 0.
|
||||
*/
|
||||
init(matchCount=0, type="results") {
|
||||
// List of all matches (c) with their left (lc) and right (rc) context CPOS.
|
||||
this.matches = [];
|
||||
/**
|
||||
* CPOS lookup object. The CPOS is the key and the value holds information
|
||||
* about that CPOS, like lemma, NER, POS, text ID etc. Each CPOS from the matches corresponds to
|
||||
* exactly one object in the cpos_lookup.
|
||||
*/
|
||||
this.cpos_lookup = {};
|
||||
/**
|
||||
* Same as above but for text IDs. One CPOS object always has a text ID
|
||||
* referencing one text object in the text_lookup. The text ID is the key; values
|
||||
* are author, publishing year etc.
|
||||
*/
|
||||
this.text_lookup = {};
|
||||
this.match_count = matchCount;
|
||||
this.corpus_type = 'results';
|
||||
this.cpos_ranges = null;
|
||||
this.query = '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Function to add json data/object data to this data instance.
|
||||
* If no key is specified the entire data will be assigned to this data
|
||||
* instance.
|
||||
*/
|
||||
addData(jsonData, key=null) {
|
||||
if (key !== null) {
|
||||
Object.assign(this[key], jsonData);
|
||||
} else if (key === null) {
|
||||
Object.assign(this, jsonData)
|
||||
}
|
||||
}
|
||||
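// Example (editor's sketch): addData merges via Object.assign, either into
// one named store or into the instance itself.
//   data.addData({'42': {word: 'example'}}, 'cpos_lookup');  // merge into this.cpos_lookup
//   data.addData({match_count: 100, query: '"example"'});    // merge at top level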
|
||||
// Get query as a string from the form Element.
|
||||
getQueryStr(queryFormElement) {
|
||||
// gets query
|
||||
let queryFormData;
|
||||
let queryStr;
|
||||
queryFormData = new FormData(queryFormElement);
|
||||
queryStr = queryFormData.get('query-form-query');
|
||||
this['query'] = queryStr;
|
||||
}
|
||||
|
||||
// Function creates a unique and safe filename for the download.
|
||||
createDownloadFilename(suffix) {
|
||||
let today = new Date();
|
||||
let currentDate = `${today.getUTCFullYear()}` +
|
||||
`-${(today.getUTCMonth() + 1)}` +
|
||||
`-${today.getUTCDate()}`;
|
||||
let currentTime = `${today.getUTCHours()}h` +
|
||||
`${today.getUTCMinutes()}m` +
|
||||
`${today.getUTCSeconds()}s`;
|
||||
let safeFilename = this.query.replace(/[^a-z0-9_-]/gi, "_");
|
||||
let resultFilename = `UTC-${currentDate}_${currentTime}_${safeFilename}_${suffix}`;
|
||||
return resultFilename
|
||||
}
|
||||
/**
|
||||
* Function to download data as Blob created from string.
|
||||
* Should be private, but private methods were not yet a JavaScript feature as of 08.04.2020.
|
||||
*/
|
||||
download(downloadElement, dataStr, filename, type, filenameSlug) {
|
||||
filename += filenameSlug;
|
||||
let file = new Blob([dataStr], {type: type});
|
||||
var url = URL.createObjectURL(file);
|
||||
downloadElement.href = url;
|
||||
downloadElement.download = filename;
|
||||
}
|
||||
|
||||
// Function to download the results as JSON.
|
||||
downloadJSONRessource(resultFilename, downloadData, downloadElement) {
|
||||
/**
|
||||
* Stringify JSON object for json download.
|
||||
* Use tabs to save some space.
|
||||
*/
|
||||
let dataStr = JSON.stringify(downloadData, undefined, "\t");
|
||||
// Start actual download
|
||||
this.download(downloadElement, dataStr, resultFilename, "text/json", ".json")
|
||||
}
|
||||
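/*
 * Usage sketch (editor's note): building a download link for the current
 * results. The suffix and the anchor element id are assumptions.
 *
 *   let filename = results.data.createDownloadFilename('full-results');
 *   results.data.downloadJSONRessource(filename, results.data,
 *                                      document.querySelector('#full-results-export'));
 */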
|
||||
}
|
||||
|
||||
/**
|
||||
* Similar to the data class but just intended for meta data about the current
|
||||
* corpus the client is working with.
|
||||
*/
|
||||
class MetaData {
|
||||
// Sets empty object structure when no input is given.
|
||||
// If a JSON-like object is given, class fields are created from it.
|
||||
init(json={}) {
|
||||
Object.assign(this, json);
|
||||
}
|
||||
}
|
||||
|
||||
// Export the classes
|
||||
export {
|
||||
Results,
|
||||
Data,
|
||||
MetaData
|
||||
};
|
837
app/static/js/modules/corpus_analysis/view/ResultsView.js
Normal file
@ -0,0 +1,837 @@
|
||||
/**
|
||||
* This class implements a ViewEventListener that is listening for the
|
||||
* specified event type and executes the given listener function with its arguments.
|
||||
*/
|
||||
class ViewEventListener {
|
||||
constructor(type, listenerFunction, args=[]) {
|
||||
this.listenerFunction = listenerFunction;
|
||||
this.type = type;
|
||||
this.args = args;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This class implements a View which handles the representation of the
|
||||
* data that has been fetched by the Client of the corpus_analysis. This view
|
||||
* only handles how the data is shown to the user. View extends the list.js
|
||||
* List class.
|
||||
*/
|
||||
class ResultsList extends List {
|
||||
/**
|
||||
* If no options are given when a new instance of this class is created
|
||||
* the options below are used.
|
||||
*/
|
||||
static options = {
|
||||
page: 30,
|
||||
pagination: [{
|
||||
name: "paginationTop",
|
||||
paginationClass: "paginationTop",
|
||||
innerWindow: 8,
|
||||
outerWindow: 1
|
||||
}, {
|
||||
paginationClass: "paginationBottom",
|
||||
innerWindow: 8,
|
||||
outerWindow: 1
|
||||
}],
|
||||
valueNames: ["titles", "lc", "c", "rc", {data: ["index"]}],
|
||||
item: `<span></span>`
|
||||
};
|
||||
constructor(idOrElement, options) {
|
||||
super(idOrElement, options);
|
||||
/**
|
||||
* All span tokens which are holding events if expert
|
||||
* mode is on. Collected here to delete later on.
|
||||
*/
|
||||
this.eventTokens = {};
|
||||
/**
|
||||
* All token elements which have added classes like chip and hoverable for
|
||||
* expert view. Collected here to delete later on.
|
||||
*/
|
||||
this.currentExpertTokenElements = {};
|
||||
/**
|
||||
* Holds true/false for the check buttons used to add matches to sub-results.
|
||||
* If checked, the value is true; if unchecked, it is false. Buttons for this
|
||||
* have the class 'add' (the little round check buttons that add matches to
|
||||
* sub-results). The key is the match index; the value is true or false.
|
||||
*/
|
||||
this.subResultsIndexes = {};
|
||||
// ViewEventListeners listening for client notifications.
|
||||
this.notificationListeners = {};
|
||||
this.knownHTMLElements = new Set();
|
||||
}
|
||||
/**
|
||||
* Function to clear/reset some class field values. Useful if a new query
|
||||
* has been issued by the user.
|
||||
*/
|
||||
resetFields() {
|
||||
this.subResultsIndexes = {};
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Function that takes one or more query selector
|
||||
* strings in an array as an input. The function then creates a
|
||||
* class field in the ResultsList object with the query selector
|
||||
* string as the key. The selector will be converted to a valid JavaScript
|
||||
* field name, i.e. #html-id-string -> this.htmlIdString.
|
||||
* The value will be the identified element or elements fetched with the
|
||||
* querySelector or querySelectorAll method, respectively.
|
||||
* If the query selector is passed as an Array of length 2, with the second
|
||||
* element defining modal options, the identified element will be initialized
|
||||
* as a modal with the given options.
|
||||
*/
|
||||
getHTMLElements(arrayOfSelectors) {
|
||||
for (let selector of arrayOfSelectors) {
|
||||
// Check if identified Element should be initialized as a modal.
|
||||
let modalInit = false;
|
||||
let options;
|
||||
if (Array.isArray(selector)) {
|
||||
options = selector[1];
|
||||
selector = selector[0];
|
||||
modalInit = true;
|
||||
}
|
||||
// Check if the current selector has already been used.
|
||||
if (this.knownHTMLElements.has(selector)) {
|
||||
continue;
|
||||
} else {
|
||||
// Get element or elements.
|
||||
let element;
|
||||
let elements;
|
||||
if (selector.startsWith('#')) {
|
||||
element = document.querySelector(selector);
|
||||
} else {
|
||||
elements = document.querySelectorAll(selector);
|
||||
elements = [...elements];
|
||||
}
|
||||
// Create valid javascript instance field name.
|
||||
let cleanKey = [];
|
||||
selector = selector.replace(/_/g, '-');
|
||||
selector.match(/\w+/g).forEach((word) => {
|
||||
let tmp = word[0].toUpperCase() + word.slice(1);
|
||||
cleanKey.push(tmp);
|
||||
});
|
||||
cleanKey[0] = cleanKey[0].toLowerCase();
|
||||
cleanKey = cleanKey.join('');
|
||||
this[cleanKey] = element ? element: elements;
|
||||
// Initialize current element as modal if modalInit true.
|
||||
if (modalInit) {
|
||||
this[cleanKey] = M.Modal.init(this[cleanKey], options);
|
||||
}
|
||||
}
|
||||
// Add current selector to knownHTMLElements.
|
||||
this.knownHTMLElements.add(selector);
|
||||
}
|
||||
}
|
||||
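// Example (editor's sketch): id selectors yield single elements, class
// selectors yield arrays, and a [selector, options] pair is initialized as a
// Materialize modal.
//   resultsList.getHTMLElements([
//     '#query-results-table',                      // -> this.queryResultsTable
//     '.add-btn',                                  // -> this.addBtn (array)
//     ['#context-modal', {dismissible: false}],    // -> this.contextModal (M.Modal)
//   ]);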
|
||||
/**
|
||||
* Register ViewEventListeners to the ResultsList. Which will listen for
|
||||
* the specified event.
|
||||
*/
|
||||
setViewEventListeners(notificationListeners) {
|
||||
for (let notificationListener of notificationListeners) {
|
||||
this.notificationListeners[notificationListener.type] = notificationListener;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the ViewEventListeners so that they will be listening to their
|
||||
* assigned custom events.
|
||||
*/
|
||||
loadViewEventListeners() {
|
||||
for (let [type, listener] of Object.entries(this.notificationListeners)) {
|
||||
if (listener.args.length > 0) {
|
||||
listener.listenerFunction(...listener.args);
|
||||
} else {
|
||||
listener.listenerFunction(type, this);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function sends events to the Client to trigger specific functions that
|
||||
* trigger new data requests from the server.
|
||||
*/
|
||||
notifyClient(caseIdentifier, detailObject={}) {
|
||||
detailObject.caseIdentifier = caseIdentifier;
|
||||
const event = new CustomEvent('notify-client', { detail: detailObject });
|
||||
console.info('View dispatching notification:', caseIdentifier);
|
||||
document.dispatchEvent(event);
|
||||
}
|
||||
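// Example (from inspect() further below): ask the Client to fetch the full
// context for one match.
//   this.notifyClient('get-results', {
//     resultsType: 'inspect-results',
//     dataIndexes: dataIndex,
//     resultsList: this,
//   });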
|
||||
/**
|
||||
* Creates the lc, c and rc CPOS arrays from the given CPOS ranges.
|
||||
*/
|
||||
helperCreateCpos(cpos_values) {
|
||||
let lc, c, rc;
|
||||
/**
|
||||
* Python range like function from MDN: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/from#Sequence_generator_(range)
|
||||
*/
|
||||
const range = (start, stop, step) => Array.from({ length: (stop - start) / step + 1}, (_, i) => start + (i * step));
|
||||
lc = cpos_values.lc ? range(cpos_values.lc[0], cpos_values.lc[1], 1) : [];
|
||||
c = range(cpos_values.c[0], cpos_values.c[1], 1);
|
||||
rc = cpos_values.rc ? range(cpos_values.rc[0], cpos_values.rc[1], 1) : [];
|
||||
return {lc: lc, c: c, rc: rc};
|
||||
}
|
||||
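// Worked example (editor's note): range(3, 6, 1) -> [3, 4, 5, 6] (inclusive),
// so helperCreateCpos({c: [10, 12]}) -> {lc: [], c: [10, 11, 12], rc: []}.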
|
||||
// Get display options from display options form element.
|
||||
static getDisplayOptions(htmlId) {
|
||||
// gets display options parameters
|
||||
let displayOptionsFormElement = document.getElementById(htmlId);
|
||||
let displayOptionsFormData = new FormData(displayOptionsFormElement);
|
||||
let displayOptionsData =
|
||||
{
|
||||
"resultsPerPage": displayOptionsFormData.get("display-options-form-results_per_page"),
|
||||
"resultsContex": displayOptionsFormData.get("display-options-form-result_context"),
|
||||
"expertMode": displayOptionsFormData.get("display-options-form-expert_mode")
|
||||
};
|
||||
return displayOptionsData
|
||||
}
|
||||
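// Usage sketch (editor's note, form id assumed):
//   let displayOptions = ResultsList.getDisplayOptions('display-options-form');
//   // -> {resultsPerPage: ..., resultsContex: ..., expertMode: ...}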
/**
|
||||
* Used in addToSubResults and inspect to toggle the design of the check
|
||||
* buttons according to their checked/unchecked status.
|
||||
*/
|
||||
helperActivateAddBtn(btn) {
|
||||
btn.classList.remove("corpus-analysis-color.lighten");
|
||||
btn.classList.add("green");
|
||||
btn.textContent = "check";
|
||||
}
|
||||
|
||||
/**
|
||||
* Used in addToSubResults and inspect to toggle the design of the check
|
||||
* buttons according to their checked/unchecked status.
|
||||
*/
|
||||
helperDeactivateAddBtn(btn) {
|
||||
btn.classList.remove("green");
|
||||
btn.classList.add("corpus-analysis-color.lighten");
|
||||
btn.textContent = "add";
|
||||
}
|
||||
/**
|
||||
* This function is invoked when the user adds or removes a match using the
|
||||
* add-btn (+ button/or green checkmark) to/from sub-results. When the button
|
||||
* is clicked the function checks if the current dataIndex ID is already
|
||||
* saved in subResultsIndexes or not. If it is not the dataIndex will be used
|
||||
* as a key in subResultsIndexes with the value true. If it is already added
|
||||
* the entry with the key dataIndex will be deleted from subResultsIndexes.
|
||||
* Visual feedback (green checkmark if a match has been added etc.) is also
|
||||
* handled on the basis of the information stored in subResultsIndexes.
|
||||
*/
|
||||
addToSubResults(dataIndex, client, tableCall=true) {
|
||||
let toShowArray;
|
||||
dataIndex = parseInt(dataIndex);
|
||||
if (!this.subResultsIndexes[dataIndex]
|
||||
|| this.subResultsIndexes[dataIndex] === undefined) {
|
||||
// add button is activated because status is false or undefined
|
||||
this.helperActivateAddBtn(event.target);
|
||||
this.subResultsIndexes[dataIndex] = true;
|
||||
toShowArray = Object.keys(this.subResultsIndexes).map(index => parseInt(index));
|
||||
// Add 1 because indexes are zero based. User sees 1 based numbering.
|
||||
toShowArray = toShowArray.map(index => index + 1);
|
||||
// Always sort the shown indexes for the user when a match is added.
|
||||
toShowArray = toShowArray.sort(function(a, b){return a-b});
|
||||
this.subResultsIndexesDisplay.textContent = toShowArray.join(', ');
|
||||
M.textareaAutoResize(this.subResultsIndexesDisplay);
|
||||
this.nrMarkedMatches.textContent = Object.keys(this.subResultsIndexes).length;
|
||||
} else if (this.subResultsIndexes[dataIndex]) {
|
||||
// add button is deactivated because status is true
|
||||
this.helperDeactivateAddBtn(event.target);
|
||||
delete this.subResultsIndexes[dataIndex];
|
||||
toShowArray = Object.keys(this.subResultsIndexes).map(index => parseInt(index));
|
||||
// Add 1 because indexes are zero based. User sees 1 based numbering.
|
||||
toShowArray = toShowArray.map(index => index + 1);
|
||||
// Always sort the shown indexes for the user when a match is removed.
|
||||
toShowArray = toShowArray.sort(function(a, b){return a-b});
|
||||
this.subResultsIndexesDisplay.textContent = toShowArray.join(', ');
|
||||
this.nrMarkedMatches.textContent = Object.keys(this.subResultsIndexes).length;
|
||||
M.textareaAutoResize(this.subResultsIndexesDisplay);
|
||||
}
|
||||
// Toggle the create button according to the number of ids in subResultsIndexes.
|
||||
if (Object.keys(this.subResultsIndexes).length > 0 && !client.isBusy) {
|
||||
this.subResultsCreate.classList.toggle('disabled', false);
|
||||
} else if (Object.keys(this.subResultsIndexes).length === 0) {
|
||||
this.subResultsCreate.classList.toggle('disabled', true);
|
||||
}
|
||||
/**
|
||||
* After a match has been added or removed the export button will be
|
||||
* hidden because the sub-results have been altered and have to be built
|
||||
* again. Thus subResultsCreateElement has to be shown again.
|
||||
*/
|
||||
this.subResultsExport.classList.add("hide");
|
||||
this.subResultsCreate.classList.remove("hide");
|
||||
/**
|
||||
* Also activate/deactivate buttons in the table/resultsList accordingly
|
||||
* if button in inspect was activated/deactivated.
|
||||
* This part only runs if tableCall is set to false when this function is
|
||||
* called.
|
||||
*/
|
||||
if (!tableCall) {
|
||||
this.getHTMLElements(['#query-results-table']);
|
||||
let container = this.queryResultsTable.querySelector(`[data-index="${dataIndex}"]`);
|
||||
let tableAddBtn = container.querySelector('.add-btn'); // gets the add button from the list view
|
||||
if (this.subResultsIndexes[dataIndex]) {
|
||||
this.helperActivateAddBtn(tableAddBtn);
|
||||
} else {
|
||||
this.helperDeactivateAddBtn(tableAddBtn);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Toggle inspect buttons depending on the Client status
|
||||
toggleInspectButtons(client) {
|
||||
if (!client.isBusy) {
|
||||
this.activateInspectButtons();
|
||||
} else if (client.isBusy) {
|
||||
this.deactivateInspectButtons();
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function. Should be private if feature is available.
|
||||
activateInspectButtons() {
|
||||
let inspectBtnElements;
|
||||
inspectBtnElements = document.querySelectorAll('.inspect');
|
||||
for (let inspectBtn of inspectBtnElements) {
|
||||
inspectBtn.classList.toggle('disabled', false);
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function. Should be private if feature is available.
|
||||
deactivateInspectButtons() {
|
||||
let inspectBtnElements;
|
||||
inspectBtnElements = document.querySelectorAll('.inspect');
|
||||
for (let inspectBtn of inspectBtnElements) {
|
||||
inspectBtn.classList.toggle('disabled', true);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Function to inspect one match in more detail (Showing more context).
|
||||
* If in dynamic mode the view notifies the client to request the new
|
||||
* context for the one match identified by the given dataIndex.
|
||||
* If not in dynamic mode the needed context will be gathered from the
|
||||
* already present results in results.data.
|
||||
*/
|
||||
inspect(client, results, dataIndex, type) {
|
||||
// initialize context modal
|
||||
this.getHTMLElements([
|
||||
['#context-modal', true],
|
||||
'#context-results',
|
||||
'#create-inspect-menu',
|
||||
'#create-from-inspect',
|
||||
]);
|
||||
// Clear fields from old data on every new inspect() call.
|
||||
this.contextId = dataIndex[0];
|
||||
this.contextResults.innerHTML = '';
|
||||
// Open modal.
|
||||
this.contextModal.open();
|
||||
this.contextResults.insertAdjacentHTML('afterbegin', `
|
||||
<div class="progress">
|
||||
<div class="indeterminate"></div>
|
||||
</div>
|
||||
`);
|
||||
if (client.dynamicMode) {
|
||||
// Notify Client to get results from server.
|
||||
this.notifyClient('get-results', {resultsType: 'inspect-results',
|
||||
dataIndexes: dataIndex,
|
||||
resultsList: this});
|
||||
} else {
|
||||
// Gather results data from already present data.
|
||||
results.inspectResultsData.matches = [results.data.matches[dataIndex[0]]];
|
||||
results.inspectResultsData.cpos_ranges = results.data.cpos_ranges;
|
||||
this.showMatchContext(results, client)
|
||||
}
|
||||
// Match nr for user to display derived from data_index.
|
||||
let contextMatchNrElement = document.getElementById("context-match-nr");
|
||||
contextMatchNrElement.textContent = this.contextId + 1;
|
||||
// Add the add button to add this match to sub results with onclick event.
|
||||
let classes = `btn-floating btn waves-effect` +
|
||||
` waves-light corpus-analysis-color.lighten right`
|
||||
let addToSubResultsIdsBtn = document.createElement("a");
|
||||
addToSubResultsIdsBtn.setAttribute("class", classes + ` add`);
|
||||
addToSubResultsIdsBtn.innerHTML = '<i class="material-icons">add</i>';
|
||||
addToSubResultsIdsBtn.onclick= () => {
|
||||
this.addToSubResults(dataIndex[0], client, false)
|
||||
};
|
||||
/**
|
||||
* Checks if the match has or has not been added to sub results yet.
|
||||
* Sets the color and status of the button accordingly.
|
||||
*/
|
||||
if (this.subResultsIndexes[dataIndex[0]]) {
|
||||
this.helperActivateAddBtn(addToSubResultsIdsBtn.firstElementChild);
|
||||
} else if (!this.subResultsIndexes[dataIndex[0]]) {
|
||||
this.helperDeactivateAddBtn(addToSubResultsIdsBtn.firstElementChild);
|
||||
}
|
||||
this.createInspectMenu.innerHTML = '';
|
||||
this.createInspectMenu.appendChild(addToSubResultsIdsBtn);
|
||||
// Hide create menu if not in dynamic mode.
|
||||
if (!client.dynamicMode) {
|
||||
this.createFromInspect.classList.add('hide');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Create Element from HTML String. Helper function should be private.
|
||||
* https://stackoverflow.com/questions/494143/creating-a-new-dom-element-from-an-html-string-using-built-in-dom-methods-or-pro/35385518#35385518
|
||||
*/
|
||||
HTMLTStrToElement(htmlStr) {
|
||||
let template = document.createElement("template");
|
||||
htmlStr = htmlStr.trim();
|
||||
template.innerHTML = htmlStr;
|
||||
return template.content.firstChild;
|
||||
}
|
||||
/**
|
||||
* Used either as a callback if the client has been notified to get new
|
||||
* results with new full context, or invoked directly as a function
|
||||
* with the corresponding input data.
|
||||
*/
|
||||
showMatchContext(results, client) {
|
||||
this.getHTMLElements([
|
||||
'#context-results',
|
||||
'#inspect-display-options-form-expert_mode_inspect',
|
||||
'#inspect-display-options-form-highlight_sentences',
|
||||
'#context-sentences'
|
||||
])
|
||||
let uniqueS = new Set();
|
||||
let uniqueContextS = new Set();
|
||||
let {lc, c, rc} = this.helperCreateCpos(results.inspectResultsData.matches[0]);
|
||||
// Create sentence strings as tokens.
|
||||
let tokenHTMLArray = [];
|
||||
let htmlTokenStr = ``;
|
||||
let tokenHTMlElement;
|
||||
let token;
|
||||
for (let cpos of lc) {
|
||||
if (client.dynamicMode) {
|
||||
token = results.inspectResultsData.cpos_lookup[cpos];
|
||||
// If client is not in dynamic mode use cpos_lookup from results.data
|
||||
} else {
|
||||
token = results.data.cpos_lookup[cpos];
|
||||
}
|
||||
uniqueS.add(token.s)
|
||||
htmlTokenStr = `<span class="token"` +
|
||||
`data-sid="${token.s}"` +
|
||||
`data-cpos="${cpos}">` +
|
||||
`${token.word}` +
|
||||
`</span>`;
|
||||
tokenHTMlElement = this.HTMLTStrToElement(htmlTokenStr)
|
||||
tokenHTMLArray.push(tokenHTMlElement);
|
||||
}
|
||||
for (let cpos of c) {
|
||||
if (client.dynamicMode) {
|
||||
token = results.inspectResultsData.cpos_lookup[cpos];
|
||||
// If client is not in dynamic mode use cpos_lookup from results.data
|
||||
} else {
|
||||
token = results.data.cpos_lookup[cpos];
|
||||
}
|
||||
uniqueContextS.add(token.s);
|
||||
uniqueS.add(token.s);
|
||||
htmlTokenStr = `<span class="token bold light-green"` +
|
||||
`data-sid="${token.s}"` +
|
||||
`data-cpos="${cpos}"` +
|
||||
`style="text-decoration-line: underline;">` +
|
||||
`${token.word}` +
|
||||
`</span>`;
|
||||
tokenHTMlElement = this.HTMLTStrToElement(htmlTokenStr)
|
||||
tokenHTMLArray.push(tokenHTMlElement);
|
||||
}
|
||||
results.inspectResultsData["context_s_ids"] = Array.from(uniqueContextS);
|
||||
for (let cpos of rc) {
|
||||
if (client.dynamicMode) {
|
||||
token = results.inspectResultsData.cpos_lookup[cpos];
|
||||
// If client is not in dynamic mode use cpos_lookup from results.data
|
||||
} else {
|
||||
token = results.data.cpos_lookup[cpos];
|
||||
}
|
||||
uniqueS.add(token.s)
|
||||
htmlTokenStr = `<span class="token"` +
|
||||
`data-sid="${token.s}"` +
|
||||
`data-cpos="${cpos}">` +
|
||||
`${token.word}` +
|
||||
`</span>`;
|
||||
tokenHTMlElement = this.HTMLTStrToElement(htmlTokenStr)
|
||||
tokenHTMLArray.push(tokenHTMlElement);
|
||||
}
|
||||
// Remove loading indeterminate HTML before context is inserted.
|
||||
this.contextResults.innerHTML = '';
|
||||
for (let sId of uniqueS) {
|
||||
let htmlSentence = `<span class="sentence" data-sid="${sId}"></span>`;
|
||||
let sentenceElement = this.HTMLTStrToElement(htmlSentence);
|
||||
for (let tokenElement of tokenHTMLArray) {
|
||||
if (tokenElement.dataset.sid == sId) {
|
||||
sentenceElement.appendChild(tokenElement);
|
||||
sentenceElement.insertAdjacentHTML("beforeend", ` `);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
this.contextResults.appendChild(sentenceElement);
|
||||
}
|
||||
// Add expert mode switch event for the modal to toggle expert mode.
|
||||
this.inspectDisplayOptionsFormExpertModeInspect.onchange = (event) => {
|
||||
if (event.target.checked) {
|
||||
this.expertModeOn("context-results", results);
|
||||
} else {
|
||||
this.expertModeOff("context-results")
|
||||
}
|
||||
};
|
||||
// Add switch event to toggle Sentence highlighting.
|
||||
this.inspectDisplayOptionsFormHighlightSentences.onchange = (event) => {
|
||||
if (event.target.checked) {
|
||||
this.higlightContextSentences();
|
||||
} else {
|
||||
this.unhighlightContextSentences();
|
||||
}
|
||||
};
|
||||
// Add range event to change nr of context sentences.
|
||||
this.contextSentences.onchange = (event) => {
|
||||
this.changeSentenceContext(event.target.value);
|
||||
}
|
||||
/**
|
||||
* Checks on every new modal opening whether the switches are checked;
|
||||
* if they are, the corresponding functions are executed.
|
||||
*/
|
||||
if (this.inspectDisplayOptionsFormExpertModeInspect.checked) {
|
||||
this.expertModeOn("context-results", results);
|
||||
}
|
||||
if (this.inspectDisplayOptionsFormHighlightSentences.checked) {
|
||||
this.higlightContextSentences();
|
||||
}
|
||||
/**
|
||||
* Checks the value of the number of sentences to show on modal opening
|
||||
* and sets the context sentences accordingly.
|
||||
*/
|
||||
this.changeSentenceContext(this.contextSentences.value);
|
||||
}
|
||||
|
||||
// Visually separates the context into sentences (based on the spaCy sentence split) by inserting line breaks.
|
||||
higlightContextSentences() {
|
||||
let sentences = document.getElementById("context-results").getElementsByClassName("sentence");
|
||||
for (let s of sentences) {
|
||||
s.insertAdjacentHTML("beforeend", `<span><br><br></span>`)
|
||||
}
|
||||
}
|
||||
// Reverse operation of above function.
|
||||
unhighlightContextSentences() {
|
||||
let sentences = document.getElementById("context-results").getElementsByClassName("sentence");
|
||||
let br;
|
||||
for (let s of sentences) {
|
||||
br = s.lastChild;
|
||||
br.remove();
|
||||
}
|
||||
}
|
||||
|
||||
// Changes how many context sentences in inspect view are shown.
|
||||
changeSentenceContext(sValue, maxSValue=10) {
|
||||
sValue = maxSValue - sValue;
|
||||
let sentences = document.getElementById("context-results").getElementsByClassName("sentence");
|
||||
let array = Array.from(sentences);
|
||||
let toHideArray;
|
||||
let toShowArray;
|
||||
if (sValue != 0) {
|
||||
toHideArray = array.slice(0, sValue).concat(array.slice(-(sValue)));
|
||||
toShowArray = array.slice(sValue, 9).concat(array.slice(9, -(sValue)))
|
||||
} else {
|
||||
toHideArray = [];
|
||||
toShowArray = array;
|
||||
}
|
||||
for (let s of toHideArray) {
|
||||
s.classList.add("hide");
|
||||
}
|
||||
for (let s of toShowArray) {
|
||||
s.classList.remove("hide");
|
||||
}
|
||||
}
|
||||
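// Worked example (editor's note): with 21 context sentences and a slider
// value of 7, sValue becomes 10 - 7 = 3, so the first three and last three
// sentences are hidden and the 15 sentences around the match stay visible.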
|
||||
// ###### Display options functions changing how results are being displayed ######
|
||||
|
||||
/**
|
||||
* Event function that changes the shown hits per page.
|
||||
* Just alters the resultsList.page property.
|
||||
*/
|
||||
changeHitsPerPage(client, results) {
|
||||
this.page = this.displayOptionsFormResultsPerPage.value;
|
||||
this.update();
|
||||
this.changeContext();
|
||||
this.toggleInspectButtons(client);
|
||||
if (this.displayOptionsFormExpertMode.checked) {
|
||||
this.expertModeOn('query-display', results);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Event function triggered on context select change also if pagination is
|
||||
* clicked.
|
||||
*/
|
||||
changeContext() {
|
||||
let newContextValue = this.displayOptionsFormResultContext.value;
|
||||
let lc = document.querySelectorAll(".left-context");
|
||||
let rc = document.querySelectorAll(".right-context");
|
||||
for (let element of lc) {
|
||||
let arrayLc = Array.from(element.childNodes);
|
||||
for (let element of arrayLc.reverse().slice(newContextValue)) {
|
||||
element.classList.add("hide");
|
||||
}
|
||||
for (let element of arrayLc.slice(0, newContextValue)) {
|
||||
element.classList.remove("hide");
|
||||
}
|
||||
}
|
||||
for (let element of rc) {
|
||||
let arrayRc = Array.from(element.childNodes);
|
||||
for (let element of arrayRc.slice(newContextValue)) {
|
||||
element.classList.add("hide");
|
||||
}
|
||||
for (let element of arrayRc.slice(0, newContextValue)) {
|
||||
element.classList.remove("hide");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ###### Expert view event functions ######
|
||||
|
||||
// Function to create a tooltip for the current hovered token.
|
||||
tooltipEventCreate(event, results) {
|
||||
let token = results.data.cpos_lookup[event.target.dataset.cpos];
|
||||
if (!token) {
|
||||
token = results.inspectResultsData.cpos_lookup[event.target.dataset.cpos];
|
||||
}
|
||||
this.currentTooltipElement = M.Tooltip.init(event.target, {
|
||||
'html': `<table>
|
||||
<tr>
|
||||
<th>Token information</th>
|
||||
<th>Source information</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="left-align">
|
||||
Word: ${token.word}<br>
|
||||
Lemma: ${token.lemma}<br>
|
||||
POS: ${token.pos}<br>
|
||||
Simple POS: ${token.simple_pos}<br>
|
||||
NER: ${token.ner}
|
||||
</td>
|
||||
<td class="left-align">
|
||||
Title: ${results.data.text_lookup[token.text].title}
|
||||
<br>
|
||||
Author: ${results.data.text_lookup[token.text].author}
|
||||
<br>
|
||||
Publishing year: ${results.data.text_lookup[token.text].publishing_year}
|
||||
</td>
|
||||
</tr>
|
||||
</table>`
|
||||
}
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Function to destroy the tooltip of the currently hovered token
|
||||
* on mouse leave
|
||||
*/
|
||||
tooltipEventDestroy(event) {
|
||||
this.currentTooltipElement.destroy();
|
||||
}
|
||||
|
||||
/**
|
||||
* Turn the expert mode on for all tokens in the DOM element identified by
|
||||
* its htmlID.
|
||||
*/
|
||||
expertModeOn(htmlId, results) {
|
||||
if (!Array.isArray(this.currentExpertTokenElements[htmlId])) {
|
||||
this.currentExpertTokenElements[htmlId] = [];
|
||||
}
|
||||
let container = document.getElementById(htmlId);
|
||||
let tokens = container.querySelectorAll('span.token');
|
||||
this.currentExpertTokenElements[htmlId].push(...tokens);
|
||||
this.eventTokens[htmlId] = [];
|
||||
for (let tokenElement of this.currentExpertTokenElements[htmlId]) {
|
||||
tokenElement.classList.add('chip', 'hoverable', 'expert-view');
|
||||
const eventCreate = (event, arg) => this.tooltipEventCreate(event, arg);
|
||||
tokenElement.onmouseover = (event) => eventCreate(event, results);
|
||||
tokenElement.onmouseout = (event) => this.tooltipEventDestroy(event);
|
||||
this.eventTokens[htmlId].push(tokenElement);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Turn the expert mode off for all tokens in the DOM element identified by
|
||||
* its htmlID.
|
||||
*/
|
||||
expertModeOff(htmlId) {
|
||||
if (!Array.isArray(this.currentExpertTokenElements[htmlId])) {
|
||||
this.currentExpertTokenElements[htmlId] = [];
|
||||
}
|
||||
if (!Array.isArray(this.eventTokens[htmlId])) {
|
||||
this.eventTokens[htmlId] = [];
|
||||
}
|
||||
for (let tokenElement of this.currentExpertTokenElements[htmlId]) {
|
||||
tokenElement.classList.remove("chip", "hoverable", "expert-view");
|
||||
}
|
||||
this.currentExpertTokenElements[htmlId] = [];
|
||||
|
||||
for (let eventToken of this.eventTokens[htmlId]) {
|
||||
eventToken.onmouseover = '';
|
||||
eventToken.onmouseout = '';
|
||||
}
|
||||
this.eventTokens[htmlId] = [];
|
||||
}
|
||||
|
||||
createResultRowElement(item, chunk, client, imported=false) {
|
||||
// Gather values from item.
|
||||
let values = item.values();
|
||||
let {lc, c, rc} = this.helperCreateCpos(values)
|
||||
// Get infos for full match row.
|
||||
let matchRowElement = document.createElement("tr");
|
||||
matchRowElement.setAttribute("data-index", values.index)
|
||||
let lcCellElement = document.createElement("td");
|
||||
lcCellElement.classList.add("left-context");
|
||||
matchRowElement.appendChild(lcCellElement);
|
||||
for (let cpos of lc) {
|
||||
let token = chunk.cpos_lookup[cpos];
|
||||
lcCellElement.insertAdjacentHTML("beforeend",
|
||||
`<span class="token" data-cpos="${cpos}">${token.word} </span>`);
|
||||
}
|
||||
|
||||
// Get infos for hit of match and set actions.
|
||||
let textTitles = new Set();
|
||||
let aCellElement = document.createElement("td");
|
||||
aCellElement.classList.add("actions");
|
||||
let cCellElement = document.createElement("td");
|
||||
cCellElement.classList.add("match-hit");
|
||||
let textTitlesCellElement = document.createElement("td");
|
||||
textTitlesCellElement.classList.add("titles");
|
||||
let matchNrElement = document.createElement("td");
|
||||
matchNrElement.classList.add("match-nr");
|
||||
matchRowElement.appendChild(cCellElement);
|
||||
matchRowElement.appendChild(aCellElement);
|
||||
for (let cpos of c) {
|
||||
let token = chunk.cpos_lookup[cpos];
|
||||
cCellElement.insertAdjacentHTML("beforeend",
|
||||
`<span class="token" data-cpos="${cpos}">${token.word} </span>`);
|
||||
// Get text titles of every hit cpos token.
|
||||
textTitles.add(chunk.text_lookup[token.text].title);
|
||||
}
|
||||
// Add some interaction buttons.
|
||||
let css = `margin-right: 5px; margin-bottom: 5px;`
|
||||
let classes = `btn-floating btn waves-effect` +
|
||||
` waves-light corpus-analysis-color.lighten`
|
||||
// Add inspect button to trigger inspect view with more context.
|
||||
let inspectBtn = document.createElement("a");
|
||||
inspectBtn.setAttribute("style", css);
|
||||
inspectBtn.setAttribute("class", classes + ` disabled inspect`
|
||||
);
|
||||
inspectBtn.innerHTML = '<i class="material-icons inspect-btn">search</i>';
|
||||
// Add btn to be able add matches to sub-results.
|
||||
let addToSubResultsBtn = document.createElement("a");
|
||||
addToSubResultsBtn.setAttribute("style", css);
|
||||
addToSubResultsBtn.setAttribute("class", classes + ` add`);
|
||||
addToSubResultsBtn.innerHTML = '<i class="material-icons add-btn">add</i>';
|
||||
if (client.dynamicMode || client.fullContext) {
|
||||
aCellElement.appendChild(inspectBtn);
|
||||
}
|
||||
if (client.dynamicMode) {
|
||||
aCellElement.appendChild(addToSubResultsBtn);
|
||||
}
|
||||
// Add text titles at front as first td of one row.
|
||||
textTitlesCellElement.textContent = [...textTitles].join(", ");
|
||||
matchRowElement.insertAdjacentHTML("afterbegin", textTitlesCellElement.outerHTML);
|
||||
matchNrElement.textContent = values.index + 1;
|
||||
matchRowElement.insertAdjacentHTML("afterbegin", matchNrElement.outerHTML);
|
||||
|
||||
// Get infos for right context of match
|
||||
let rcCellElement = document.createElement("td");
|
||||
rcCellElement.classList.add("right-context");
|
||||
matchRowElement.appendChild(rcCellElement);
|
||||
for (let cpos of rc) {
|
||||
let token = chunk.cpos_lookup[cpos];
|
||||
rcCellElement.insertAdjacentHTML("beforeend",
|
||||
`<span class="token" data-cpos="${cpos}">${token.word} </span>`);
|
||||
}
|
||||
return matchRowElement
|
||||
}
|
||||
/**
|
||||
* Creates the HTML table code for the metadata view in the corpus analysis
|
||||
* interface
|
||||
*/
|
||||
createMetaDataForModal(metaDataObject) {
|
||||
let html = `<div class="col s12">
|
||||
<table class="highlight">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Meta Data Description</th>
|
||||
<th>Value</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>`
|
||||
for (let [outerKey, outerValue] of Object.entries(metaDataObject)) {
|
||||
// Use more descriptive names.
|
||||
if (outerKey === 'corpus_all_texts') {
|
||||
let tmpName = 'All texts in this corpus';
|
||||
html += `<tr>
|
||||
<td style="text-transform: uppercase;">${tmpName.replace(/_/g, " ")}
|
||||
</td>`
|
||||
} else if (outerKey === 'text_lookup') {
|
||||
let tmpName = 'Texts where the query resulted in matches'
|
||||
html += `<tr>
|
||||
<td style="text-transform: uppercase;">${tmpName.replace(/_/g, " ")}
|
||||
</td>`
|
||||
} else {
|
||||
html += `<tr>
|
||||
<td style="text-transform: uppercase;">${outerKey.replace(/_/g, " ")}
|
||||
</td>`
|
||||
}
|
||||
if (outerKey === "corpus_all_texts" || outerKey === "text_lookup") {
|
||||
html += `<td>
|
||||
<ul class="collapsible">`
|
||||
for (let [innerKey, innerValue] of Object.entries(outerValue)) {
|
||||
html += `<li class="text-metadata"
|
||||
data-metadata-key="${outerKey}"
|
||||
data-text-key="${innerKey}">
|
||||
<div class="collapsible-header"
|
||||
data-metadata-key="${outerKey}"
|
||||
data-text-key="${innerKey}">
|
||||
<i class="material-icons"
|
||||
data-metadata-key="${outerKey}"
|
||||
data-text-key="${innerKey}">info_outline</i>
|
||||
${innerValue['author']} - ${innerValue['publishing_year']} -
|
||||
${innerValue['title']}
|
||||
</div>
|
||||
<div class="collapsible-body">
|
||||
<span>
|
||||
<ul id="bibliographic-data-${outerKey}-${innerKey}"
|
||||
style="column-count: 2;">
|
||||
</ul>
|
||||
</span>
|
||||
</div>
|
||||
</li>`
|
||||
}
|
||||
html += `</ul>
|
||||
</td>`
|
||||
} else {
|
||||
html += `<td>${outerValue}</td>`
|
||||
}
|
||||
html += `</tr>`
|
||||
}
|
||||
html += `</tbody>
|
||||
</table>`
|
||||
return html
|
||||
}
|
||||
/**
|
||||
* Creates the text details for the texts shown in the corpus analysis
|
||||
* metadata modal table.
|
||||
*/
|
||||
createTextDetails(metaData) {
|
||||
let metadataKey = event.target.dataset.metadataKey;
|
||||
let textKey = event.target.dataset.textKey;
|
||||
let textData = metaData[metadataKey][textKey];
|
||||
let bibliographicData = document.querySelector(`#bibliographic-data-${metadataKey}-${textKey}`);
|
||||
bibliographicData.textContent = '';
|
||||
for (let [key, value] of Object.entries(textData)) {
|
||||
bibliographicData.insertAdjacentHTML("afterbegin",
|
||||
`
|
||||
<li>
|
||||
<span style="text-transform: capitalize;">${key}:</span>
|
||||
${value}
|
||||
</li>
|
||||
`
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Export classes.
|
||||
export {
|
||||
ViewEventListener,
|
||||
ResultsList
|
||||
};
|
244
app/static/js/modules/corpus_analysis/view/callbacks.js
Normal file
@ -0,0 +1,244 @@
|
||||
/**
|
||||
* This file contains all the callbacks triggered by the notificationListener.
|
||||
* General callbacks are also defined which hide/disable and
|
||||
* show/enable common elements depending on whether data is being transmitted.
|
||||
*/
|
||||
|
||||
// Callback to disable some elements for the user when the client is busy.
|
||||
function disableElementsGeneralCallback(resultsList, detail) {
|
||||
if (detail.client.isBusy) {
|
||||
resultsList.fullResultsCreate.classList.toggle('disabled', true);
|
||||
resultsList.subResultsCreate.classList.toggle('disabled', true);
|
||||
resultsList.toggleInspectButtons(detail.client);
|
||||
}
|
||||
}
|
||||
|
||||
// Callback to enable some elements for the user when the client is not busy.
|
||||
function enableElementsGeneralCallback(resultsList, detail) {
|
||||
if (!detail.client.isBusy) {
|
||||
resultsList.fullResultsCreate.classList.toggle('disabled', false);
|
||||
if (Object.keys(resultsList.subResultsIndexes).length > 0) {
|
||||
resultsList.subResultsCreate.classList.toggle('disabled', false);
|
||||
}
|
||||
resultsList.toggleInspectButtons(detail.client);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback opening the loading modal when the client is connecting to the
|
||||
* CQP server.
|
||||
*/
|
||||
function connectingCallback(resultsList, detail) {
|
||||
resultsList.getHTMLElements(['#analysis-init-modal']);
|
||||
resultsList.analysisInitModal = M.Modal.init(resultsList.analysisInitModal,
|
||||
{dismissible: false});
|
||||
resultsList.analysisInitModal.open();
|
||||
}
|
||||
|
||||
// Callback executed once the client has connected.
|
||||
function connectedCallback(resultsList, detail) {
|
||||
/**
|
||||
* In the past this closed the init modal. But the init modal is now being
|
||||
* closed when the meta data has also been received. See below.
|
||||
*/
|
||||
}
|
||||
|
||||
// Callback that closes the loading modal from above.
|
||||
function metaDataRecievedCallback(resultsList, detail) {
|
||||
resultsList.analysisInitModal.close();
|
||||
}
|
||||
|
||||
// Callback that shows the user some feedback if the client raised an error.
|
||||
function clientFailedCallback(resultsList, detail) {
|
||||
resultsList.getHTMLElements([
|
||||
'#analysis-init-progress',
|
||||
'#analysis-init-error',
|
||||
'#user-feedback',
|
||||
]);
|
||||
if (detail.raiseModalFeedback) {
|
||||
resultsList.analysisInitModal.open();
|
||||
resultsList.analysisInitProgress.classList.toggle('hide');
|
||||
resultsList.analysisInitError.classList.toggle('hide');
|
||||
resultsList.analysisInitError.textContent = detail.msg;
|
||||
} else {
|
||||
nopaque.appClient.flash(detail.msg, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
// Callback doing some preparation work when a query has been issued by the user.
|
||||
function queryDataPreparingCallback(resultsList, detail) {
|
||||
// remove all items from resultsList, like from the query issued before
|
||||
resultsList.clear()
|
||||
// get needed HTML Elements
|
||||
let results = detail.results;
|
||||
resultsList.getHTMLElements([
|
||||
'#interactions-menu',
|
||||
'#recieved-match-count',
|
||||
'#total-match-count',
|
||||
'#text-lookup-titles',
|
||||
'#text-lookup-count',
|
||||
'#query-results-user-feedback',
|
||||
'#query-progress-bar',
|
||||
'#query-results-create',
|
||||
'#sub-results-indexes-display',
|
||||
'#nr-marked-matches',
|
||||
]);
|
||||
// show or enable some things for the user
|
||||
resultsList.interactionsMenu.classList.toggle('hide', false);
|
||||
resultsList.queryResultsUserFeedback.classList.toggle('hide', false);
|
||||
resultsList.queryProgressBar.classList.toggle('hide', false);
|
||||
resultsList.showCorpusFiles.classList.toggle('disabled', true);
|
||||
/**
|
||||
* Set some initial values for the user feedback
|
||||
* or reset values for new issued query
|
||||
*/
|
||||
resultsList.recievedMatchCount.textContent = 0;
|
||||
resultsList.totalMatchCount.textContent = results.data.match_count;
|
||||
resultsList.textLookupCount.textContent = 0;
|
||||
resultsList.nrMarkedMatches.textContent = 0;
|
||||
resultsList.subResultsIndexesDisplay.textContent = '';
|
||||
resultsList.resetFields();
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback handling the incoming results of an issued query. It renders
|
||||
* the incoming matches using the resultsList for the user.
|
||||
*/
|
||||
function queryDataRecievingCallback(resultsList, detail) {
|
||||
// load the data into the resultsList and show them to the user
|
||||
let results = detail.results;
|
||||
let client = detail.client;
|
||||
let start = detail.dataLength;
|
||||
let resultItems = [];
|
||||
for (let [index, match] of Object.entries(results.data.matches).slice(start)) {
|
||||
resultItems.push({ ...match, ...{ 'index': parseInt(index) } });
|
||||
}
|
||||
if (client.dynamicMode) {
|
||||
resultsList.add(resultItems, (items) => {
|
||||
for (let item of items) {
|
||||
item.elm = resultsList.createResultRowElement(item,
|
||||
results.data,
|
||||
client);
|
||||
}
|
||||
});
|
||||
// update user feedback about query status
|
||||
resultsList.recievedMatchCount.textContent = results.data.matches.length;
|
||||
resultsList.queryProgressBar.firstElementChild.style.width = `${client.requestQueryProgress}%`;
|
||||
resultsList.textLookupCount.textContent = `${Object.keys(results.data.text_lookup).length}`;
|
||||
// updating table on finished item creation callback via createResultRowElement
|
||||
resultsList.update();
|
||||
resultsList.changeHitsPerPage(client, results);
|
||||
resultsList.changeContext();
|
||||
// Activate expert mode if the switch is checked.
|
||||
if (resultsList.displayOptionsFormExpertMode.checked) {
|
||||
resultsList.expertModeOn('query-display', results);
|
||||
}
|
||||
} else if (!client.dynamicMode) {
|
||||
resultsList.add(resultItems, (items) => {
|
||||
for (let item of items) {
|
||||
item.elm = resultsList.createResultRowElement(item,
|
||||
results.data,
|
||||
client,
|
||||
true);
|
||||
}
|
||||
});
|
||||
// update user feedback about query status
|
||||
resultsList.recievedMatchCount.textContent = results.data.matches.length;
|
||||
resultsList.queryProgressBar.firstElementChild.style.width = `${client.requestQueryProgress}%`;
|
||||
resultsList.textLookupCount.textContent = `${Object.keys(results.data.text_lookup).length}`;
|
||||
// updating table on finished item creation callback via createResultRowElement
|
||||
resultsList.update();
|
||||
resultsList.changeHitsPerPage(client, results);
|
||||
resultsList.changeContext();
|
||||
}
|
||||
}
|
||||
|
||||
// Callback that is executed when all results from an issued query have been received.
|
||||
function queryDataRecievedCallback(resultsList, detail) {
|
||||
// hide or disable some things for the user
|
||||
resultsList.queryResultsUserFeedback.classList.toggle('hide', true);
|
||||
resultsList.queryProgressBar.classList.toggle('hide', true);
|
||||
// reset bar progress for next query
|
||||
resultsList.queryProgressBar.firstElementChild.style.width = '0%';
|
||||
resultsList.showCorpusFiles.classList.toggle('disabled', false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback that is handling incoming results data. Results data is needed for
|
||||
* the export and download of the data.
|
||||
*/
|
||||
function resultsDataRecievingCallback(resultsList, detail) {
|
||||
resultsList.getHTMLElements([
|
||||
'#full-results-progress-bar',
|
||||
'#sub-results-progress-bar',
|
||||
]);
|
||||
// Disable the full context switch when results are being received.
|
||||
resultsList.exportFullInspectContext.setAttribute('disabled', '');
|
||||
if (detail.type === 'full-results' && detail.progress) {
|
||||
resultsList.fullResultsProgressBar.firstElementChild.style.width = `${detail.progress}%`;
|
||||
resultsList.fullResultsProgressBar.classList.toggle('hide', false);
|
||||
} else if (detail.type === 'sub-results' && detail.progress) {
|
||||
resultsList.subResultsProgressBar.firstElementChild.style.width = `${detail.progress}%`;
|
||||
resultsList.subResultsProgressBar.classList.toggle('hide', false);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Callback executed when all results data has been received.
|
||||
* Reactivates the results create buttons etc.
|
||||
*/
|
||||
function resultsDataRecievedCallback(resultsList, detail) {
|
||||
// create strings for create buttons depending on type
|
||||
const handleType = (keyPrefix, text) => {
|
||||
// Enable the full context switch when results have been received.
|
||||
resultsList.exportFullInspectContext.removeAttribute('disabled');
|
||||
// Hide the create element after results have been received and reset it.
|
||||
resultsList[`${keyPrefix}Create`].classList.toggle('hide');
|
||||
resultsList[`${keyPrefix}Create`].textContent = `Create ${text}`;
|
||||
resultsList[`${keyPrefix}Create`].insertAdjacentHTML('beforeend',
|
||||
`<i class="material-icons left">build</i>`);
|
||||
// show and highlight export button
|
||||
resultsList[`${keyPrefix}Export`].classList.toggle('hide', false);
|
||||
resultsList[`${keyPrefix}Export`].classList.toggle('pulse', true);
|
||||
setTimeout(() => {
|
||||
resultsList[`${keyPrefix}Export`].classList.toggle('pulse', false);
|
||||
clearTimeout();
|
||||
}, 3000)
|
||||
}
|
||||
if (detail.type === 'full-results') {
|
||||
handleType('fullResults', 'Results');
|
||||
if (detail.fullContext) {
|
||||
resultsList.fullResultsProgressBar.firstElementChild.style.width = `0%`;
|
||||
resultsList.fullResultsProgressBar.classList.toggle('hide', true);
|
||||
}
|
||||
} else if (detail.type ==='sub-results') {
|
||||
handleType('subResults', 'Sub-Results');
|
||||
if (detail.fullContext) {
|
||||
resultsList.subResultsProgressBar.firstElementChild.style.width = `0%`;
|
||||
resultsList.subResultsProgressBar.classList.toggle('hide', true);
|
||||
}
|
||||
} else if (detail.type ==='inspect-results') {
|
||||
if (Object.keys(resultsList.subResultsIndexes).length === 0) {
|
||||
/**
|
||||
* Prevent create sub results button from being activated if it is disabled
|
||||
* and no matches have been marked by the user for sub results creation.
|
||||
*/
|
||||
resultsList.subResultsCreate.classList.toggle('disabled', true);
|
||||
}
|
||||
resultsList.showMatchContext(detail.results, detail.client);
|
||||
}
|
||||
}
|
||||
|
||||
// export the callbacks
|
||||
export {
|
||||
connectingCallback,
|
||||
connectedCallback,
|
||||
metaDataRecievedCallback,
|
||||
clientFailedCallback,
|
||||
queryDataPreparingCallback,
|
||||
queryDataRecievingCallback,
|
||||
queryDataRecievedCallback,
|
||||
resultsDataRecievingCallback,
|
||||
resultsDataRecievedCallback,
|
||||
disableElementsGeneralCallback,
|
||||
enableElementsGeneralCallback,
|
||||
};
|
378
app/static/js/modules/corpus_analysis/view/listeners.js
Normal file
@ -0,0 +1,378 @@
|
||||
/**
|
||||
* 1.)
|
||||
* This file contains the listener function that will be assigned to the
|
||||
* corpus_analysis ResultsView. The listener is listening for the notification
|
||||
* event which is being dispatched by the corpus_analysis Client. The
|
||||
* notification Event triggers the listener which will call different
|
||||
* callback functions depending on the detail information of the notification
|
||||
* event.
|
||||
* 2.)
|
||||
* This file also contains vanilla javascript Event listeners which are
|
||||
* listening for button clicks and other user interactions with the page.
|
||||
* They will be registered the same way as the listeners above.
|
||||
*/
|
||||
|
||||
import {
|
||||
connectingCallback,
|
||||
connectedCallback,
|
||||
metaDataRecievedCallback,
|
||||
clientFailedCallback,
|
||||
queryDataPreparingCallback,
|
||||
queryDataRecievingCallback,
|
||||
queryDataRecievedCallback,
|
||||
resultsDataRecievingCallback,
|
||||
resultsDataRecievedCallback,
|
||||
disableElementsGeneralCallback,
|
||||
enableElementsGeneralCallback,
|
||||
} from './callbacks.js';
|
||||
// Import the script that implements a spinner animation for buttons.
|
||||
import {
|
||||
loadingSpinnerHTML,
|
||||
} from './spinner.js';
|
||||
|
||||
|
||||
/**
|
||||
* The listener listens for the notification event 'notify-view' dispatched
|
||||
* by the client and executes callbacks accordingly.
|
||||
*/
|
||||
function recieveClientNotification(eventType, resultsList) {
|
||||
document.addEventListener(eventType, (event) => {
|
||||
let caseIdentifier = event.detail.caseIdentifier;
|
||||
switch (caseIdentifier) {
|
||||
case 'client-failed':
|
||||
console.error('View recieved notification:', caseIdentifier);
|
||||
// execute callbacks
|
||||
clientFailedCallback(resultsList, event.detail);
|
||||
break;
|
||||
case 'connecting':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
connectingCallback(resultsList, event.detail);
|
||||
// execute callbacks
|
||||
break;
|
||||
case 'connected':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
connectedCallback(resultsList, event.detail);
|
||||
break;
|
||||
case 'meta-data-recieving':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
break;
|
||||
case 'meta-data-recieved':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
// execute
|
||||
metaDataRecievedCallback(resultsList, event.detail);
|
||||
break;
|
||||
case 'query-data-prepareing':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
// Hide all download buttons
|
||||
resultsList.fullResultsExport.classList.toggle('hide', true);
|
||||
resultsList.subResultsExport.classList.toggle('hide', true);
|
||||
// Show all create buttons
|
||||
resultsList.fullResultsCreate.classList.toggle('hide', false);
|
||||
resultsList.subResultsCreate.classList.toggle('hide', false);
|
||||
// execute callbacks
|
||||
disableElementsGeneralCallback(resultsList, event.detail);
|
||||
queryDataPreparingCallback(resultsList, event.detail);
|
||||
break;
|
||||
case 'query-data-recieving':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
// execute callbacks
|
||||
queryDataRecievingCallback(resultsList, event.detail);
|
||||
break;
|
||||
case 'query-data-recieved':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
// execute callbacks
|
||||
queryDataRecievedCallback(resultsList, event.detail);
|
||||
enableElementsGeneralCallback(resultsList, event.detail);
|
||||
// create sub-results is disabled per default until matches have been added
|
||||
resultsList.subResultsCreate.classList.toggle('disabled', true);
|
||||
break;
|
||||
case 'results-data-recieving':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
// execute callbacks
|
||||
disableElementsGeneralCallback(resultsList, event.detail);
|
||||
resultsDataRecievingCallback(resultsList, event.detail);
|
||||
break;
|
||||
case 'results-data-recieved':
|
||||
console.info('View recieved notification:', caseIdentifier);
|
||||
// execute callbacks
|
||||
console.info(event.detail);
|
||||
resultsDataRecievedCallback(resultsList, event.detail);
|
||||
enableElementsGeneralCallback(resultsList, event.detail);
|
||||
break;
|
||||
default:
|
||||
console.error('Recieved unkown notification case identifier from Client');
|
||||
// do something to not crash the analysis session?
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* This are some vanilla javascript Event listeners which are listening
|
||||
* for button clicks etc. the user is doing to interact with the page.
|
||||
* They will be registered the same way as the listeners above.
|
||||
*/
|
||||
|
||||
/**
|
||||
* The following listener handles what functions are called when the user
|
||||
* does use the page navigation to navigate to a new page.
|
||||
*/
|
||||
function pageNavigation(resultsList, results, client) {
|
||||
for (let element of resultsList.pagination) {
|
||||
element.addEventListener("click", (event) => {
|
||||
// Shows match context according to the user picked value on a new page.
|
||||
resultsList.changeContext();
|
||||
// De- or activates expertMode on new page depending on switch value.
|
||||
if (resultsList.displayOptionsFormExpertMode.checked) {
|
||||
resultsList.expertModeOn('query-display', results);
|
||||
} else {
|
||||
resultsList.expertModeOff('query-display');
|
||||
}
|
||||
// Activates inspect buttons on new page if client is not busy.
|
||||
resultsList.toggleInspectButtons(client);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The following event Listener handles the expert mode switch for the list.
|
||||
*/
|
||||
function expertModeSwitch(resultsList, results) {
|
||||
resultsList.displayOptionsFormExpertMode.onchange = (event) => {
|
||||
if (event.target.checked) {
|
||||
resultsList.expertModeOn('query-display', results);
|
||||
} else {
|
||||
resultsList.expertModeOff('query-display');
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* The following event Listener handles the add-btn and the inspect-btn
|
||||
* onclick events via bubbleing.
|
||||
*/
|
||||
function actionButtons(resultsList, results, client) {
|
||||
resultsList.queryResultsTable.addEventListener('click', (event) => {
|
||||
let dataIndex;
|
||||
if (event.target.classList.contains('inspect-btn')) {
|
||||
dataIndex = parseInt(event.target.closest('tr').dataset.index);
|
||||
resultsList.inspect(client, results, [dataIndex], 'inspect');
|
||||
} else if (event.target.classList.contains('add-btn')) {
|
||||
dataIndex = parseInt(event.target.closest('tr').dataset.index);
|
||||
resultsList.addToSubResults(dataIndex, client);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Following event listeners handle the change of Context size per match and
|
||||
* the number of matches shown per page.
|
||||
*/
|
||||
function displayOptions(resultsList, results, client) {
|
||||
resultsList.displayOptionsFormResultsPerPage.onchange = (event) => {
|
||||
resultsList.changeHitsPerPage(client, results);
|
||||
};
|
||||
resultsList.displayOptionsFormResultContext.onchange = (event) => {
|
||||
resultsList.changeContext();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* The following event listener handles the show metadata button and its
|
||||
* functionality.
|
||||
*/
|
||||
function showMetaData(resultsList, results) {
|
||||
resultsList.showMetaData.onclick = () => {
|
||||
resultsList.metaDataModalContent.textContent = '';
|
||||
let table = resultsList.createMetaDataForModal(results.metaData);
|
||||
resultsList.metaDataModalContent.insertAdjacentHTML('afterbegin', table);
|
||||
resultsList.metaDataModal.open();
|
||||
let collapsibles = resultsList.metaDataModalContent.querySelectorAll(".text-metadata");
|
||||
for (let collapsible of collapsibles) {
|
||||
collapsible.onclick = () => {
|
||||
let elems = resultsList.metaDataModalContent.querySelectorAll('.collapsible');
|
||||
let instances = M.Collapsible.init(elems, {accordion: false});
|
||||
resultsList.createTextDetails(results.metaData);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* The following event listener handles the button showing infos about matches
|
||||
* and their corresponding corpus files
|
||||
*/
|
||||
|
||||
function showCorpusFiles(resultsList, results) {
|
||||
resultsList.showCorpusFiles.onclick = () => {
|
||||
resultsList.showCorpusFilesModalContent.innerHTML = '';
|
||||
let htmlString = `
|
||||
<div id="corpus-file-table">
|
||||
<ul class="pagination paginationTop"></ul>
|
||||
<table class="responsive-table highlight">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="sort" data-sort="title">Title</th>
|
||||
<th class="sort" data-sort="year">Year</th>
|
||||
<th class="sort" data-sort="match-count">Match count in this text</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody class="list">
|
||||
`
|
||||
for (let [key, value] of Object.entries(results.data.text_lookup)) {
|
||||
htmlString += `
|
||||
<tr>
|
||||
<td class="title">${value.title}</td>
|
||||
<td class="year">${value.publishing_year}</td>
|
||||
<td class="match-count">${value.match_count}</td>
|
||||
</tr>
|
||||
`
|
||||
}
|
||||
htmlString += `
|
||||
</tbody>
|
||||
</table>
|
||||
<ul class="pagination paginationBottom"></ul>
|
||||
</div>
|
||||
`
|
||||
resultsList.showCorpusFilesModalContent.insertAdjacentHTML('afterbegin', htmlString);
|
||||
resultsList.showCorpusFilesModal.open();
|
||||
let options = {
|
||||
page: 10,
|
||||
pagination: [{
|
||||
name: "paginationTop",
|
||||
paginationClass: "paginationTop",
|
||||
innerWindow: 8,
|
||||
outerWindow: 1
|
||||
}, {
|
||||
paginationClass: "paginationBottom",
|
||||
innerWindow: 8,
|
||||
outerWindow: 1
|
||||
}],
|
||||
valueNames: ["title", "year", "match-count"],
|
||||
};
|
||||
let corpusFileTable = new List('corpus-file-table', options);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if resultsList.exportFullInspectContext switch is changed.
|
||||
* If it has been changed reset all Download buttons.
|
||||
*/
|
||||
function exportFullContextSwitch(resultsList) {
|
||||
resultsList.exportFullInspectContext.onchange = (event) => {
|
||||
// Hide all download buttons.
|
||||
resultsList.fullResultsExport.classList.toggle('hide', true);
|
||||
resultsList.subResultsExport.classList.toggle('hide', true);
|
||||
// Show result create buttons.
|
||||
resultsList.fullResultsCreate.classList.toggle('hide', false);
|
||||
resultsList.subResultsCreate.classList.toggle('hide', false);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The following event listeners are handeling the data export.
|
||||
* 1. Create full-results
|
||||
* 2. Create sub-results
|
||||
* 3. Download full-results
|
||||
* 4. Download sub-results
|
||||
* 5. Download single inspect-results
|
||||
*/
|
||||
|
||||
// 1. Add events for full-results create
|
||||
function createFullResults(resultsList, results) {
|
||||
resultsList.fullResultsCreate.onclick = (event) => {
|
||||
resultsList.fullResultsCreate.querySelector('i').classList.toggle('hide');
|
||||
resultsList.fullResultsCreate.textContent = 'Creating...';
|
||||
resultsList.fullResultsCreate.insertAdjacentHTML('afterbegin',
|
||||
loadingSpinnerHTML);
|
||||
// .keys() is for a zero based array. I think...
|
||||
let dataIndexes = [...Array(results.data.match_count).keys()];
|
||||
// Empty fullResultsData so that no previous data is used.
|
||||
results.fullResultsData.init();
|
||||
resultsList.notifyClient('get-results', {resultsType: 'full-results',
|
||||
dataIndexes: dataIndexes,
|
||||
resultsList: resultsList,});
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Add events for sub-results create
|
||||
function createSubResults(resultsList, results) {
|
||||
resultsList.subResultsCreate.onclick = (event) => {
|
||||
let dataIndexes = [];
|
||||
Object.keys(resultsList.subResultsIndexes).forEach((id) => {
|
||||
dataIndexes.push(id);
|
||||
});
|
||||
resultsList.subResultsCreate.querySelector('i').classList.toggle('hide');
|
||||
resultsList.subResultsCreate.textContent = 'Creating...';
|
||||
resultsList.subResultsCreate.insertAdjacentHTML('afterbegin',
|
||||
loadingSpinnerHTML);
|
||||
// Empty subResultsData so that no previous data is used.
|
||||
results.subResultsData.init();
|
||||
resultsList.notifyClient('get-results', {resultsType: 'sub-results',
|
||||
dataIndexes: dataIndexes,
|
||||
resultsList: resultsList,});
|
||||
}
|
||||
}
|
||||
// 3. Open download modal when full results export button is pressed
|
||||
function exportFullResults(resultsList, results) {
|
||||
resultsList.fullResultsExport.onclick = (event) => {
|
||||
resultsList.queryResultsDownloadModal.open();
|
||||
// add onclick to download JSON button and download the file
|
||||
resultsList.downloadResultsJson.onclick = (event) => {
|
||||
let suffix = 'full-results'
|
||||
if (resultsList.exportFullInspectContext.checked) {
|
||||
suffix += '_full-context';
|
||||
}
|
||||
let filename = results.fullResultsData.createDownloadFilename(suffix);
|
||||
results.fullResultsData.addData(results.metaData);
|
||||
results.fullResultsData.downloadJSONRessource(filename,
|
||||
results.fullResultsData,
|
||||
resultsList.downloadResultsJson)};
|
||||
}
|
||||
}
|
||||
// 4. Open download modal when sub results export button is pressed
|
||||
function exportSubResults(resultsList, results) {
|
||||
resultsList.subResultsExport.onclick = (event) => {
|
||||
resultsList.queryResultsDownloadModal.open();
|
||||
// add onclick to download JSON button and download the file
|
||||
resultsList.downloadResultsJson.onclick = (event) => {
|
||||
let suffix = 'sub-results'
|
||||
if (resultsList.exportFullInspectContext.checked) {
|
||||
suffix += '_full-context';
|
||||
}
|
||||
let filename = results.subResultsData.createDownloadFilename(suffix);
|
||||
results.subResultsData.addData(results.metaData);
|
||||
results.subResultsData.downloadJSONRessource(filename,
|
||||
results.subResultsData,
|
||||
resultsList.downloadResultsJson)};
|
||||
}
|
||||
}
|
||||
// 5. Open download modal when inspect-results-export button is pressed
|
||||
function exportSingleMatch(resultsList, results) {
|
||||
resultsList.inspectResultsExport.onclick = (event) => {
|
||||
resultsList.queryResultsDownloadModal.open();
|
||||
// add onclick to download JSON button and download the file
|
||||
resultsList.downloadResultsJson.onclick = (event) => {
|
||||
let filename = results.subResultsData.createDownloadFilename('inspect-results_full-context');
|
||||
results.subResultsData.addData(results.metaData);
|
||||
results.subResultsData.downloadJSONRessource(filename,
|
||||
results.inspectResultsData,
|
||||
resultsList.downloadResultsJson)};
|
||||
}
|
||||
}
|
||||
|
||||
// export listeners
|
||||
export {
|
||||
recieveClientNotification,
|
||||
pageNavigation,
|
||||
expertModeSwitch,
|
||||
actionButtons,
|
||||
displayOptions,
|
||||
showMetaData,
|
||||
showCorpusFiles,
|
||||
exportFullContextSwitch,
|
||||
createFullResults,
|
||||
createSubResults,
|
||||
exportFullResults,
|
||||
exportSubResults,
|
||||
exportSingleMatch,
|
||||
};
|
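For reference, the 'notify-view' notifications consumed by recieveClientNotification are plain DOM CustomEvents whose detail carries a caseIdentifier. A minimal sketch of how a client could dispatch one; the helper name and any extra detail fields are assumptions, not taken from this diff:

// Illustrative sketch only: dispatch a notification the listener above understands.
// notifyView is an assumed helper name; 'notify-view' is the event name the
// file comment above refers to.
function notifyView(caseIdentifier, extraDetail = {}) {
  document.dispatchEvent(new CustomEvent('notify-view', {
    detail: {caseIdentifier: caseIdentifier, ...extraDetail},
  }));
}

// Example: tell the view that query data has fully arrived.
notifyView('query-data-recieved');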
27
app/static/js/modules/corpus_analysis/view/scrollToTop.js
Normal file
@ -0,0 +1,27 @@
|
||||
/**
 * Function to show a scroll-to-top button once the user has scrolled down
 * 250 pixels from the element specified by scrollToElementSelector.
 */
function scrollToTop(scrollToElementSelector, triggerElementSelector) {
  let scrollToThis = document.querySelector(scrollToElementSelector);
  let scrolltoTopTrigger = document.querySelector(triggerElementSelector);
  window.addEventListener('scroll', (event) => {
    if (pageYOffset > 250) {
      scrolltoTopTrigger.classList.toggle('hide', false);
    } else {
      scrolltoTopTrigger.classList.toggle('hide', true);
    }
  });
  scrolltoTopTrigger.onclick = () => {
    scrollToThis.scrollIntoView({
      behavior: 'smooth',
      block: 'start',
      inline: 'nearest'
    });
  };
}

// Export function.
export {
  scrollToTop
};
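A possible call site, as a sketch; the two selectors are hypothetical and just need to point at the scroll target and the trigger button:

// Illustrative usage sketch; '#headline' and '#scroll-to-top-btn' are assumed selectors.
import { scrollToTop } from './scrollToTop.js';

scrollToTop('#headline', '#scroll-to-top-btn');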
19
app/static/js/modules/corpus_analysis/view/spinner.js
Normal file
@ -0,0 +1,19 @@
|
||||
// loading spinner animation HTML
const loadingSpinnerHTML = `
  <div class="preloader-wrapper button-icon-spinner small active">
    <div class="spinner-layer spinner-green-only">
      <div class="circle-clipper left">
        <div class="circle"></div>
      </div><div class="gap-patch">
        <div class="circle"></div>
      </div><div class="circle-clipper right">
        <div class="circle"></div>
      </div>
    </div>
  </div>
`;

// Export const.
export {
  loadingSpinnerHTML
};
103
app/static/js/nopaque/RessourceDisplays/CorpusDisplay.js
Normal file
@ -0,0 +1,103 @@
|
||||
class CorpusDisplay extends RessourceDisplay {
  constructor(displayElement) {
    super(displayElement);
    this.corpusId = displayElement.dataset.corpusId;
    this.user.eventListeners.corpus.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), this.corpusId);
  }

  init() {
    for (let exportCorpusTriggerElement of this.displayElement.querySelectorAll('.export-corpus-trigger')) {exportCorpusTriggerElement.addEventListener('click', () => this.requestCorpusExport());}
    nopaque.appClient.socket.on(`export_corpus_${this.user.data.corpora[this.corpusId].id}`, () => this.downloadCorpus());
    this.setCreationDate(this.user.data.corpora[this.corpusId].creation_date);
    this.setDescription(this.user.data.corpora[this.corpusId].description);
    this.setLastEditedDate(this.user.data.corpora[this.corpusId].last_edited_date);
    this.setStatus(this.user.data.corpora[this.corpusId].status);
    this.setTitle(this.user.data.corpora[this.corpusId].title);
    this.setTokenRatio(this.user.data.corpora[this.corpusId].current_nr_of_tokens, this.user.data.corpora[this.corpusId].max_nr_of_tokens);
  }

  patch(patch) {
    let re;
    for (let operation of patch) {
      switch(operation.op) {
        case 'replace':
          // Matches: /corpora/{this.corpusId}/last_edited_date
          re = new RegExp('^/corpora/' + this.user.data.corpora[this.corpusId].id + '/last_edited_date');
          if (re.test(operation.path)) {this.setLastEditedDate(operation.value); break;}
          // Matches: /corpora/{this.corpusId}/status
          re = new RegExp('^/corpora/' + this.user.data.corpora[this.corpusId].id + '/status$');
          if (re.test(operation.path)) {this.setStatus(operation.value); break;}
          break;
        default:
          break;
      }
    }
  }

  requestCorpusExport() {
    nopaque.appClient.socket.emit('export_corpus', this.user.data.corpora[this.corpusId].id);
    nopaque.appClient.flash('Preparing your corpus export...', 'corpus');
    for (let exportCorpusTriggerElement of this.displayElement.querySelectorAll('.export-corpus-trigger')) {exportCorpusTriggerElement.classList.toggle('disabled', true);}
  }

  downloadCorpus() {
    nopaque.appClient.flash('Corpus export is done. Your corpus download is ready!', 'corpus');
    for (let exportCorpusTriggerElement of this.displayElement.querySelectorAll('.export-corpus-trigger')) {exportCorpusTriggerElement.classList.toggle('disabled', false);}
    // Little trick to call the download view after zipping has finished
    let fakeBtn = document.createElement('a');
    fakeBtn.href = `/corpora/${this.user.data.corpora[this.corpusId].id}/download`;
    fakeBtn.click();
  }

  setTitle(title) {
    for (let element of this.displayElement.querySelectorAll('.corpus-title')) {this.setElement(element, title);}
  }

  setTokenRatio(currentNrOfTokens, maxNrOfTokens) {
    let tokenRatio = `${currentNrOfTokens}/${maxNrOfTokens}`;
    for (let element of this.displayElement.querySelectorAll('.corpus-token-ratio')) {this.setElement(element, tokenRatio);}
  }

  setDescription(description) {
    for (let element of this.displayElement.querySelectorAll('.corpus-description')) {this.setElement(element, description);}
  }

  setStatus(status) {
    for (let element of this.displayElement.querySelectorAll('.analyse-corpus-trigger')) {
      if (['analysing', 'prepared', 'start analysis'].includes(status)) {
        element.classList.remove('disabled');
      } else {
        element.classList.add('disabled');
      }
    }
    for (let element of this.displayElement.querySelectorAll('.build-corpus-trigger')) {
      if (status === 'unprepared' && Object.values(this.user.data.corpora[this.corpusId].files).length > 0) {
        element.classList.remove('disabled');
      } else {
        element.classList.add('disabled');
      }
    }
    for (let element of this.displayElement.querySelectorAll('.corpus-status')) {this.setElement(element, status);}
    for (let exportCorpusTriggerElement of this.displayElement.querySelectorAll('.export-corpus-trigger')) {
      exportCorpusTriggerElement.classList.toggle('disabled', !['prepared', 'start analysis', 'stop analysis'].includes(status));
    }
    for (let element of this.displayElement.querySelectorAll('.status')) {element.dataset.status = status;}
    for (let element of this.displayElement.querySelectorAll('.status-spinner')) {
      if (['submitted', 'queued', 'running', 'canceling', 'start analysis', 'stop analysis'].includes(status)) {
        element.classList.remove('hide');
      } else {
        element.classList.add('hide');
      }
    }
  }

  setCreationDate(creationDateTimestamp) {
    let creationDate = new Date(creationDateTimestamp * 1000).toLocaleString("en-US");
    for (let element of this.displayElement.querySelectorAll('.corpus-creation-date')) {this.setElement(element, creationDate);}
  }

  setLastEditedDate(endDateTimestamp) {
    let endDate = new Date(endDateTimestamp * 1000).toLocaleString("en-US");
    for (let element of this.displayElement.querySelectorAll('.corpus-end-date')) {this.setElement(element, endDate);}
  }
}
87
app/static/js/nopaque/RessourceDisplays/JobDisplay.js
Normal file
@ -0,0 +1,87 @@
|
||||
class JobDisplay extends RessourceDisplay {
  constructor(displayElement) {
    super(displayElement);
    this.jobId = displayElement.dataset.jobId;
    this.user.eventListeners.job.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), this.jobId);
  }

  init(job) {
    this.setCreationDate(this.user.data.jobs[this.jobId].creation_date);
    this.setEndDate(this.user.data.jobs[this.jobId].end_date);
    this.setDescription(this.user.data.jobs[this.jobId].description);
    this.setService(this.user.data.jobs[this.jobId].service);
    this.setServiceArgs(this.user.data.jobs[this.jobId].service_args);
    this.setServiceVersion(this.user.data.jobs[this.jobId].service_version);
    this.setStatus(this.user.data.jobs[this.jobId].status);
    this.setTitle(this.user.data.jobs[this.jobId].title);
  }

  patch(patch) {
    let re;
    for (let operation of patch) {
      switch(operation.op) {
        case 'replace':
          // Matches: /jobs/{this.jobId}/end_date
          re = new RegExp('^/jobs/' + this.user.data.jobs[this.jobId].id + '/end_date');
          if (re.test(operation.path)) {this.setEndDate(operation.value); break;}
          // Matches: /jobs/{this.jobId}/status
          re = new RegExp('^/jobs/' + this.user.data.jobs[this.jobId].id + '/status$');
          if (re.test(operation.path)) {this.setStatus(operation.value); break;}
          break;
        default:
          break;
      }
    }
  }

  setTitle(title) {
    for (let element of this.displayElement.querySelectorAll('.job-title')) {this.setElement(element, title);}
  }

  setDescription(description) {
    for (let element of this.displayElement.querySelectorAll('.job-description')) {this.setElement(element, description);}
  }

  setStatus(status) {
    for (let element of this.displayElement.querySelectorAll('.job-status')) {
      this.setElement(element, status);
    }
    for (let element of this.displayElement.querySelectorAll('.status')) {element.dataset.status = status;}
    for (let element of this.displayElement.querySelectorAll('.status-spinner')) {
      if (['complete', 'failed'].includes(status)) {
        element.classList.add('hide');
      } else {
        element.classList.remove('hide');
      }
    }
    for (let element of this.displayElement.querySelectorAll('.restart-job-trigger')) {
      if (['complete', 'failed'].includes(status)) {
        element.classList.remove('hide');
      } else {
        element.classList.add('hide');
      }
    }
  }

  setCreationDate(creationDateTimestamp) {
    let creationDate = new Date(creationDateTimestamp * 1000).toLocaleString("en-US");
    for (let element of this.displayElement.querySelectorAll('.job-creation-date')) {this.setElement(element, creationDate);}
  }

  setEndDate(endDateTimestamp) {
    let endDate = new Date(endDateTimestamp * 1000).toLocaleString("en-US");
    for (let element of this.displayElement.querySelectorAll('.job-end-date')) {this.setElement(element, endDate);}
  }

  setService(service) {
    for (let element of this.displayElement.querySelectorAll('.job-service')) {this.setElement(element, service);}
  }

  setServiceArgs(serviceArgs) {
    for (let element of this.displayElement.querySelectorAll('.job-service-args')) {this.setElement(element, serviceArgs);}
  }

  setServiceVersion(serviceVersion) {
    for (let element of this.displayElement.querySelectorAll('.job-service-version')) {this.setElement(element, serviceVersion);}
  }
}
45
app/static/js/nopaque/RessourceDisplays/RessourceDisplay.js
Normal file
@ -0,0 +1,45 @@
|
||||
class RessourceDisplay {
  constructor(displayElement) {
    if (displayElement.dataset.userId) {
      if (displayElement.dataset.userId in nopaque.appClient.users) {
        this.user = nopaque.appClient.users[displayElement.dataset.userId];
      } else {
        console.error(`User not found: ${displayElement.dataset.userId}`);
        return;
      }
    } else {
      this.user = nopaque.appClient.users.self;
    }
    this.displayElement = displayElement;
  }

  eventHandler(eventType, payload) {
    switch (eventType) {
      case 'init':
        this.init(payload);
        break;
      case 'patch':
        this.patch(payload);
        break;
      default:
        console.error(`Unknown event type: ${eventType}`);
        break;
    }
  }

  init() {console.error('init method not implemented!');}

  patch() {console.error('patch method not implemented!');}

  setElement(element, value) {
    switch (element.tagName) {
      case 'INPUT':
        element.value = value;
        M.updateTextFields();
        break;
      default:
        element.innerText = value;
        break;
    }
  }
}
97
app/static/js/nopaque/RessourceLists/CorpusFileList.js
Normal file
@ -0,0 +1,97 @@
|
||||
class CorpusFileList extends RessourceList {
  constructor(listElement, options = {}) {
    super(listElement, {...CorpusFileList.options, ...options});
    this.corpusId = listElement.dataset.corpusId;
    this.user.eventListeners.corpus.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), this.corpusId);
  }

  init() {
    super.init(this.user.data.corpora[this.corpusId].files);
  }

  onclick(event) {
    let ressourceElement = event.target.closest('tr');
    if (ressourceElement === null) {return;}
    let corpusFileId = ressourceElement.dataset.id;
    let actionButtonElement = event.target.closest('.action-button');
    if (actionButtonElement === null) {return;}
    let action = actionButtonElement === null ? 'view' : actionButtonElement.dataset.action;
    switch (action) {
      case 'delete':
        let deleteModalHTML = `<div class="modal">
          <div class="modal-content">
            <h4>Confirm corpus file deletion</h4>
            <p>Do you really want to delete the corpus file <b>${this.user.data.corpora[this.corpusId].files[corpusFileId].filename}</b>? It will be permanently deleted!</p>
          </div>
          <div class="modal-footer">
            <a href="#!" class="btn modal-close waves-effect waves-light">Cancel</a>
            <a class="btn modal-close red waves-effect waves-light" href="${this.user.data.corpora[this.corpusId].files[corpusFileId].url}/delete"><i class="material-icons left">delete</i>Delete</a>
          </div>
        </div>`;
        let deleteModalParentElement = document.querySelector('#modals');
        deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML);
        let deleteModalElement = deleteModalParentElement.lastChild;
        let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}});
        deleteModal.open();
        break;
      case 'download':
        window.location.href = this.user.data.corpora[this.corpusId].files[corpusFileId].download_url;
        break;
      case 'view':
        if (corpusFileId !== '-1') {window.location.href = this.user.data.corpora[this.corpusId].files[corpusFileId].url;}
        break;
      default:
        console.error(`Unknown action: "${action}"`);
        break;
    }
  }

  patch(patch) {
    let id, match, re, valueName;
    for (let operation of patch) {
      switch(operation.op) {
        case 'add':
          // Matches the only paths that should be handled here: /corpora/{corpusId}/files/{corpusFileId}
          re = new RegExp('^/corpora/' + this.user.data.corpora[this.corpusId].id + '/files/(\\d+)$');
          if (re.test(operation.path)) {this.add(operation.value);}
          break;
        case 'remove':
          // See case add ;)
          re = new RegExp('^/corpora/' + this.user.data.corpora[this.corpusId].id + '/files/(\\d+)$');
          if (re.test(operation.path)) {
            [match, id] = operation.path.match(re);
            this.remove(id);
          }
          break;
        case 'replace':
          // Matches the only paths that should be handled here: /corpora/{corpusId}/files/{corpusFileId}/{author || filename || publishing_year || title}
          re = new RegExp('^/corpora/' + this.user.data.corpora[this.corpusId].id + '/files/(\\d+)/(author|filename|publishing_year|title)$');
          if (re.test(operation.path)) {
            [match, id, valueName] = operation.path.match(re);
            this.replace(id, valueName, operation.value);
          }
          break;
        default:
          break;
      }
    }
  }

  preprocessRessource(corpusFile) {
    return {id: corpusFile.id, author: corpusFile.author, filename: corpusFile.filename, publishing_year: corpusFile.publishing_year, title: corpusFile.title};
  }
}

CorpusFileList.options = {
  item: `<tr>
    <td><span class="filename"></span></td>
    <td><span class="author"></span></td>
    <td><span class="title"></span></td>
    <td><span class="publishing_year"></span></td>
    <td class="right-align">
      <a class="action-button btn-floating red tooltipped waves-effect waves-light" data-action="delete" data-position="top" data-tooltip="Delete"><i class="material-icons">delete</i></a>
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="download" data-position="top" data-tooltip="Download"><i class="material-icons">file_download</i></a>
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="view" data-position="top" data-tooltip="View"><i class="material-icons">send</i></a>
    </td>
  </tr>`,
  valueNames: [{data: ['id']}, 'author', 'filename', 'publishing_year', 'title']
};
93
app/static/js/nopaque/RessourceLists/CorpusList.js
Normal file
@ -0,0 +1,93 @@
|
||||
class CorpusList extends RessourceList {
  constructor(listElement, options = {}) {
    super(listElement, {...CorpusList.options, ...options});
    this.user.eventListeners.corpus.addEventListener((eventType, payload) => this.eventHandler(eventType, payload));
  }

  init() {
    super.init(this.user.data.corpora);
  }

  onclick(event) {
    let ressourceElement = event.target.closest('tr');
    if (ressourceElement === null) {return;}
    let corpusId = ressourceElement.dataset.id;
    let actionButtonElement = event.target.closest('.action-button');
    let action = (actionButtonElement === null) ? 'view' : actionButtonElement.dataset.action;
    switch (action) {
      case 'delete':
        let deleteModalHTML = `<div class="modal">
          <div class="modal-content">
            <h4>Confirm corpus deletion</h4>
            <p>Do you really want to delete the corpus <b>${this.user.data.corpora[corpusId].title}</b>? All files will be permanently deleted!</p>
          </div>
          <div class="modal-footer">
            <a href="#!" class="btn modal-close waves-effect waves-light">Cancel</a>
            <a class="btn modal-close red waves-effect waves-light" href="${this.user.data.corpora[corpusId].url}/delete"><i class="material-icons left">delete</i>Delete</a>
          </div>
        </div>`;
        let deleteModalParentElement = document.querySelector('#modals');
        deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML);
        let deleteModalElement = deleteModalParentElement.lastChild;
        let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}});
        deleteModal.open();
        break;
      case 'view':
        if (corpusId !== '-1') {window.location.href = this.user.data.corpora[corpusId].url;}
        break;
      default:
        console.error(`Unknown action: ${action}`);
        break;
    }
  }

  patch(patch) {
    let id, match, re, valueName;
    for (let operation of patch) {
      switch(operation.op) {
        case 'add':
          // Matches the only paths that should be handled here: /corpora/{corpusId}
          re = /^\/corpora\/(\d+)$/;
          if (re.test(operation.path)) {this.add(operation.value);}
          break;
        case 'remove':
          // See case 'add' ;)
          re = /^\/corpora\/(\d+)$/;
          if (re.test(operation.path)) {
            [match, id] = operation.path.match(re);
            this.remove(id);
          }
          break;
        case 'replace':
          // Matches the only paths that should be handled here: /corpora/{corpusId}/{status || description || title}
          re = /^\/corpora\/(\d+)\/(status|description|title)$/;
          if (re.test(operation.path)) {
            [match, id, valueName] = operation.path.match(re);
            this.replace(id, valueName, operation.value);
          }
          break;
        default:
          break;
      }
    }
  }

  preprocessRessource(corpus) {
    return {id: corpus.id,
            status: corpus.status,
            description: corpus.description,
            title: corpus.title};
  }
}

CorpusList.options = {
  item: `<tr>
    <td><a class="btn-floating disabled"><i class="material-icons service-color darken" data-service="corpus-analysis">book</i></a></td>
    <td><b class="title"></b><br><i class="description"></i></td>
    <td><span class="badge new status status-color status-text" data-badge-caption=""></span></td>
    <td class="right-align">
      <a class="action-button btn-floating red tooltipped waves-effect waves-light" data-action="delete" data-position="top" data-tooltip="Delete"><i class="material-icons">delete</i></a>
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="view" data-position="top" data-tooltip="View"><i class="material-icons">send</i></a>
    </td>
  </tr>`,
  valueNames: [{data: ['id']}, {name: 'status', attr: 'data-status'}, 'description', 'title']
};
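The patch methods above receive JSON Patch style operations and route them by matching the operation path. A sketch of the kind of patch CorpusList.patch handles; the ids and values are invented for the example:

// Illustrative sketch: a JSON Patch array as CorpusList.patch expects it.
// The concrete ids and values are made up.
let examplePatch = [
  // handled by case 'add' -> this.add(operation.value)
  {op: 'add', path: '/corpora/7', value: {id: 7, status: 'unprepared', description: 'Demo', title: 'My corpus'}},
  // handled by case 'replace' -> this.replace('7', 'status', 'prepared')
  {op: 'replace', path: '/corpora/7/status', value: 'prepared'},
  // handled by case 'remove' -> this.remove('7')
  {op: 'remove', path: '/corpora/7'}
];
// corpusList.patch(examplePatch);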
41
app/static/js/nopaque/RessourceLists/JobInputList.js
Normal file
@ -0,0 +1,41 @@
|
||||
class JobInputList extends RessourceList {
  constructor(listElement, options = {}) {
    super(listElement, {...JobInputList.options, ...options});
    this.jobId = listElement.dataset.jobId;
    this.user.eventListeners.job.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), this.jobId);
  }

  init() {
    super.init(this.user.data.jobs[this.jobId].inputs);
  }

  onclick(event) {
    let ressourceElement = event.target.closest('tr');
    if (ressourceElement === null) {return;}
    let jobInputId = ressourceElement.dataset.id;
    let actionButtonElement = event.target.closest('.action-button');
    if (actionButtonElement === null) {return;}
    let action = actionButtonElement.dataset.action;
    switch (action) {
      case 'download':
        window.location.href = this.user.data.jobs[this.jobId].inputs[jobInputId].download_url;
        break;
      default:
        console.error(`Unknown action: "${action}"`);
        break;
    }
  }

  preprocessRessource(jobInput) {
    return {id: jobInput.id, filename: jobInput.filename};
  }
}

JobInputList.options = {
  item: `<tr>
    <td><span class="filename"></span></td>
    <td class="right-align">
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="download" data-position="top" data-tooltip="Download"><i class="material-icons">file_download</i></a>
    </td>
  </tr>`,
  valueNames: [{data: ['id']}, 'filename']
};
94
app/static/js/nopaque/RessourceLists/JobList.js
Normal file
@ -0,0 +1,94 @@
|
||||
class JobList extends RessourceList {
  constructor(listElement, options = {}) {
    super(listElement, {...JobList.options, ...options});
    this.user.eventListeners.job.addEventListener((eventType, payload) => this.eventHandler(eventType, payload));
  }

  init() {
    super.init(this.user.data.jobs);
  }

  onclick(event) {
    let ressourceElement = event.target.closest('tr');
    if (ressourceElement === null) {return;}
    let jobId = ressourceElement.dataset.id;
    let actionButtonElement = event.target.closest('.action-button');
    let action = actionButtonElement === null ? 'view' : actionButtonElement.dataset.action;
    switch (action) {
      case 'delete':
        let deleteModalHTML = `<div class="modal">
          <div class="modal-content">
            <h4>Confirm job deletion</h4>
            <p>Do you really want to delete the job <b>${this.user.data.jobs[jobId].title}</b>? All files will be permanently deleted!</p>
          </div>
          <div class="modal-footer">
            <a href="#!" class="btn modal-close waves-effect waves-light">Cancel</a>
            <a class="btn modal-close red waves-effect waves-light" href="${this.user.data.jobs[jobId].url}/delete"><i class="material-icons left">delete</i>Delete</a>
          </div>
        </div>`;
        let deleteModalParentElement = document.querySelector('#modals');
        deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML);
        let deleteModalElement = deleteModalParentElement.lastChild;
        let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}});
        deleteModal.open();
        break;
      case 'view':
        if (jobId !== '-1') {window.location.href = this.user.data.jobs[jobId].url;}
        break;
      default:
        console.error(`Unknown action: "${action}"`);
        break;
    }
  }

  patch(patch) {
    let id, match, re, valueName;
    for (let operation of patch) {
      switch(operation.op) {
        case 'add':
          // Matches the only paths that should be handled here: /jobs/{jobId}
          re = /^\/jobs\/(\d+)$/;
          if (re.test(operation.path)) {this.add(operation.value);}
          break;
        case 'remove':
          // See case add ;)
          re = /^\/jobs\/(\d+)$/;
          if (re.test(operation.path)) {
            [match, id] = operation.path.match(re);
            this.remove(id);
          }
          break;
        case 'replace':
          // Matches the only paths that should be handled here: /jobs/{jobId}/{service || status || description || title}
          re = /^\/jobs\/(\d+)\/(service|status|description|title)$/;
          if (re.test(operation.path)) {
            [match, id, valueName] = operation.path.match(re);
            this.replace(id, valueName, operation.value);
          }
          break;
        default:
          break;
      }
    }
  }

  preprocessRessource(job) {
    return {id: job.id,
            service: job.service,
            status: job.status,
            description: job.description,
            title: job.title};
  }
}

JobList.options = {
  item: `<tr>
    <td><a class="btn-floating disabled"><i class="nopaque-icons service service-color darken service-icon"></i></a></td>
    <td><b class="title"></b><br><i class="description"></i></td>
    <td><span class="badge new status status-color status-text" data-badge-caption=""></span></td>
    <td class="right-align">
      <a class="action-button btn-floating red tooltipped waves-effect waves-light" data-action="delete" data-position="top" data-tooltip="Delete"><i class="material-icons">delete</i></a>
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="view" data-position="top" data-tooltip="View"><i class="material-icons">send</i></a>
    </td>
  </tr>`,
  valueNames: [{data: ['id']}, {name: 'service', attr: 'data-service'}, {name: 'status', attr: 'data-status'}, 'description', 'title']
};
71
app/static/js/nopaque/RessourceLists/JobResultList.js
Normal file
@ -0,0 +1,71 @@
|
||||
class JobResultList extends RessourceList {
  constructor(listElement, options = {}) {
    super(listElement, {...JobResultList.options, ...options});
    this.jobId = listElement.dataset.jobId;
    this.user.eventListeners.job.addEventListener((eventType, payload) => this.eventHandler(eventType, payload), this.jobId);
  }

  init() {
    super.init(this.user.data.jobs[this.jobId].results);
  }

  onclick(event) {
    let ressourceElement = event.target.closest('tr');
    if (ressourceElement === null) {return;}
    let jobResultId = ressourceElement.dataset.id;
    let actionButtonElement = event.target.closest('.action-button');
    if (actionButtonElement === null) {return;}
    let action = actionButtonElement.dataset.action;
    switch (action) {
      case 'download':
        window.location.href = this.user.data.jobs[this.jobId].results[jobResultId].download_url;
        break;
      default:
        console.error(`Unknown action: "${action}"`);
        break;
    }
  }

  patch(patch) {
    let re;
    for (let operation of patch) {
      switch(operation.op) {
        case 'add':
          // Matches the only paths that should be handled here: /jobs/{jobId}/results/{jobResultId}
          re = new RegExp('^/jobs/' + this.user.data.jobs[this.jobId].id + '/results/(\\d+)$');
          if (re.test(operation.path)) {this.add(operation.value);}
          break;
        default:
          break;
      }
    }
  }

  preprocessRessource(jobResult) {
    let description;
    if (jobResult.filename.endsWith('.pdf.zip')) {
      description = 'PDF files with text layer';
    } else if (jobResult.filename.endsWith('.txt.zip')) {
      description = 'Raw text files';
    } else if (jobResult.filename.endsWith('.vrt.zip')) {
      description = 'VRT compliant files including the NLP data';
    } else if (jobResult.filename.endsWith('.xml.zip')) {
      description = 'TEI compliant files';
    } else if (jobResult.filename.endsWith('.poco.zip')) {
      description = 'HOCR and image files for post correction (PoCo)';
    } else {
      description = 'All result files created during this job';
    }
    return {id: jobResult.id, description: description, filename: jobResult.filename};
  }
}

JobResultList.options = {
  item: `<tr>
    <td><span class="description"></span></td>
    <td><span class="filename"></span></td>
    <td class="right-align">
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="download" data-position="top" data-tooltip="Download"><i class="material-icons">file_download</i></a>
    </td>
  </tr>`,
  valueNames: [{data: ['id']}, 'description', 'filename']
};
93
app/static/js/nopaque/RessourceLists/QueryResultList.js
Normal file
@ -0,0 +1,93 @@
|
||||
class QueryResultList extends RessourceList {
  constructor(listElement, options = {}) {
    super(listElement, {...QueryResultList.options, ...options});
    this.user.eventListeners.queryResult.addEventListener((eventType, payload) => this.eventHandler(eventType, payload));
  }

  init() {
    super.init(this.user.data.query_results);
  }

  onclick(event) {
    let ressourceElement = event.target.closest('tr');
    if (ressourceElement === null) {return;}
    let queryResultId = ressourceElement.dataset.id;
    let actionButtonElement = event.target.closest('.action-button');
    let action = actionButtonElement === null ? 'view' : actionButtonElement.dataset.action;
    switch (action) {
      case 'delete':
        let deleteModalHTML = `<div class="modal">
          <div class="modal-content">
            <h4>Confirm query result deletion</h4>
            <p>Do you really want to delete the query result <b>${this.user.data.query_results[queryResultId].title}</b>? It will be permanently deleted!</p>
          </div>
          <div class="modal-footer">
            <a href="#!" class="btn modal-close waves-effect waves-light">Cancel</a>
            <a class="btn modal-close red waves-effect waves-light" href="${this.user.data.query_results[queryResultId].url}/delete"><i class="material-icons left">delete</i>Delete</a>
          </div>
        </div>`;
        let deleteModalParentElement = document.querySelector('#modals');
        deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML);
        let deleteModalElement = deleteModalParentElement.lastChild;
        let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}});
        deleteModal.open();
        break;
      case 'view':
        if (queryResultId !== '-1') {window.location.href = this.user.data.query_results[queryResultId].url;}
        break;
      default:
        console.error(`Unknown action: "${action}"`);
        break;
    }
  }

  patch(patch) {
    let id, match, re, valueName;
    for (let operation of patch) {
      switch(operation.op) {
        case 'add':
          // Matches the only paths that should be handled here: /query_results/{queryResultId}
          re = /^\/query_results\/(\d+)$/;
          if (re.test(operation.path)) {this.add(operation.value);}
          break;
        case 'remove':
          // See case add ;)
          re = /^\/query_results\/(\d+)$/;
          if (re.test(operation.path)) {
            [match, id] = operation.path.match(re);
            this.remove(id);
          }
          break;
        case 'replace':
          // Matches the only paths that should be handled here: /query_results/{queryResultId}/{corpus_title || description || query || title}
          re = /^\/query_results\/(\d+)\/(corpus_title|description|query|title)$/;
          if (re.test(operation.path)) {
            [match, id, valueName] = operation.path.match(re);
            this.replace(id, valueName, operation.value);
          }
          break;
        default:
          break;
      }
    }
  }

  preprocessRessource(queryResult) {
    return {id: queryResult.id,
            corpus_title: queryResult.corpus_title,
            description: queryResult.description,
            query: queryResult.query,
            title: queryResult.title};
  }
}

QueryResultList.options = {
  item: `<tr>
    <td><b class="title"></b><br><i class="description"></i><br></td>
    <td><span class="corpus_title"></span><br><span class="query"></span></td>
    <td class="right-align">
      <a class="action-button btn-floating red tooltipped waves-effect waves-light" data-action="delete" data-position="top" data-tooltip="Delete"><i class="material-icons">delete</i></a>
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="view" data-position="top" data-tooltip="View"><i class="material-icons">send</i></a>
    </td>
  </tr>`,
  valueNames: [{data: ['id']}, 'corpus_title', 'description', 'query', 'title']
};
99
app/static/js/nopaque/RessourceLists/RessourceList.js
Normal file
@ -0,0 +1,99 @@
|
||||
class RessourceList {
  /* A wrapper class for the list.js list.
   * This class is not meant to be used directly, instead it should be used as
   * a base class for concrete ressource list implementations.
   */
  constructor(listElement, options = {}) {
    if (listElement.dataset.userId) {
      if (listElement.dataset.userId in nopaque.appClient.users) {
        this.user = nopaque.appClient.users[listElement.dataset.userId];
      } else {
        console.error(`User not found: ${listElement.dataset.userId}`);
        return;
      }
    } else {
      this.user = nopaque.appClient.users.self;
    }
    this.list = new List(listElement, {...RessourceList.options, ...options});
    this.list.list.innerHTML = `<tr>
      <td class="row" colspan="100%">
        <div class="col s12"> </div>
        <div class="col s3 m2 xl1">
          <div class="preloader-wrapper active">
            <div class="spinner-layer spinner-green-only">
              <div class="circle-clipper left">
                <div class="circle"></div>
              </div>
              <div class="gap-patch">
                <div class="circle"></div>
              </div>
              <div class="circle-clipper right">
                <div class="circle"></div>
              </div>
            </div>
          </div>
        </div>
        <div class="col s9 m6 xl5">
          <span class="card-title">Waiting for data...</span>
          <p>This list is not initialized yet.</p>
        </div>
      </td>
    </tr>`;
    this.list.list.style.cursor = 'pointer';
    if (typeof this.onclick === 'function') {this.list.list.addEventListener('click', event => this.onclick(event));}
  }

  eventHandler(eventType, payload) {
    switch (eventType) {
      case 'init':
        this.init();
        break;
      case 'patch':
        this.patch(payload);
        break;
      default:
        console.error(`Unknown event type: ${eventType}`);
        break;
    }
  }

  init(ressources) {
    this.list.clear();
    this.add(Object.values(ressources));
    this.list.sort('id', {order: 'desc'});
    let emptyListElementHTML = `<tr class="show-if-only-child" data-id="-1">
      <td colspan="100%">
        <span class="card-title"><i class="left material-icons" style="font-size: inherit;">file_download</i>Nothing here...</span>
        <p>No ressource available.</p>
      </td>
    </tr>`;
    this.list.list.insertAdjacentHTML('afterbegin', emptyListElementHTML);
  }

  patch(patch) {
    /*
     * It's not possible to generalize a patch handler for all types of
     * ressources, so this method is meant to be an interface.
     */
    console.error('patch method not implemented!');
  }

  add(values) {
    let ressources = Array.isArray(values) ? values : [values];
    if (typeof this.preprocessRessource === 'function') {
      ressources = ressources.map(ressource => this.preprocessRessource(ressource));
    }
    // Set a callback function ('() => {return;}') to force List.js to perform
    // the add method asynchronously: https://listjs.com/api/#add
    this.list.add(ressources, () => {return;});
  }

  remove(id) {
    this.list.remove('id', id);
  }

  replace(id, valueName, newValue) {
    this.list.get('id', id)[0].values({[valueName]: newValue});
  }
}

RessourceList.options = {page: 5, pagination: [{innerWindow: 4, outerWindow: 1}]};
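A page would wire a concrete list by instantiating it with a list element that carries the data attributes the constructors read; the base class then registers for the owning user's 'init'/'patch' events. A sketch of such wiring; the element ids are assumptions:

// Illustrative wiring sketch; '#corpus-list' and '#corpus-file-list' are assumed ids.
// CorpusFileList additionally expects a data-corpus-id attribute on its element.
document.addEventListener('DOMContentLoaded', () => {
  let corpusListElement = document.querySelector('#corpus-list');
  if (corpusListElement) {new CorpusList(corpusListElement);}

  let corpusFileListElement = document.querySelector('#corpus-file-list');
  if (corpusFileListElement) {new CorpusFileList(corpusFileListElement);}
});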
71
app/static/js/nopaque/RessourceLists/UserList.js
Normal file
@ -0,0 +1,71 @@
|
||||
class UserList extends RessourceList {
  constructor(listElement, options = {}) {
    super(listElement, {...UserList.options, ...options});
    this.users = undefined;
  }

  init(users) {
    this.users = users;
    super.init(users);
  }

  onclick(event) {
    let ressourceElement = event.target.closest('tr');
    if (ressourceElement === null) {return;}
    let userId = ressourceElement.dataset.id;
    let actionButtonElement = event.target.closest('.action-button');
    let action = (actionButtonElement === null) ? 'view' : actionButtonElement.dataset.action;
    switch (action) {
      case 'delete':
        let deleteModalHTML = `<div class="modal">
          <div class="modal-content">
            <h4>Confirm user deletion</h4>
            <p>Do you really want to delete the user <b>${this.users[userId].username}</b>? All files will be permanently deleted!</p>
          </div>
          <div class="modal-footer">
            <a href="#!" class="btn modal-close waves-effect waves-light">Cancel</a>
            <a class="btn modal-close red waves-effect waves-light" href="/admin/users/${userId}/delete"><i class="material-icons left">delete</i>Delete</a>
          </div>
        </div>`;
        let deleteModalParentElement = document.querySelector('#modals');
        deleteModalParentElement.insertAdjacentHTML('beforeend', deleteModalHTML);
        let deleteModalElement = deleteModalParentElement.lastChild;
        let deleteModal = M.Modal.init(deleteModalElement, {onCloseEnd: () => {deleteModal.destroy(); deleteModalElement.remove();}});
        deleteModal.open();
        break;
      case 'edit':
        window.location.href = `/admin/users/${userId}/edit`;
        break;
      case 'view':
        if (userId !== '-1') {window.location.href = `/admin/users/${userId}`;}
        break;
      default:
        console.error(`Unknown action: ${action}`);
        break;
    }
  }

  preprocessRessource(user) {
    return {id: user.id,
            id_: user.id,
            username: user.username,
            email: user.email,
            last_seen: new Date(user.last_seen * 1000).toLocaleString("en-US"),
            role: user.role.name};
  }
}

UserList.options = {
  item: `<tr>
    <td><span class="id_"></span></td>
    <td><span class="username"></span></td>
    <td><span class="email"></span></td>
    <td><span class="last_seen"></span></td>
    <td><span class="role"></span></td>
    <td class="right-align">
      <a class="action-button btn-floating red tooltipped waves-effect waves-light" data-action="delete" data-position="top" data-tooltip="Delete"><i class="material-icons">delete</i></a>
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="edit" data-position="top" data-tooltip="Edit"><i class="material-icons">edit</i></a>
      <a class="action-button btn-floating tooltipped waves-effect waves-light" data-action="view" data-position="top" data-tooltip="View"><i class="material-icons">send</i></a>
    </td>
  </tr>`,
  valueNames: [{data: ['id']}, 'id_', 'username', 'email', 'last_seen', 'role']
};
250
app/static/js/nopaque/main.js
Normal file
@ -0,0 +1,250 @@
class AppClient {
  constructor(currentUserId) {
    if (currentUserId) {
      this.socket = io({transports: ['websocket']});
      this.users = {};
      this.users.self = this.loadUser(currentUserId);
      this.users.self.eventListeners.job.addEventListener((eventType, payload) => this.jobEventHandler(eventType, payload));
    }
  }

  flash(message, category) {
    let toast;
    let toastCloseActionElement;

    switch (category) {
      case "corpus":
        message = `<i class="left material-icons">book</i>${message}`;
        break;
      case "error":
        message = `<i class="left material-icons error-color-text">error</i>${message}`;
        break;
      case "job":
        message = `<i class="left nopaque-icons">J</i>${message}`;
        break;
      default:
        message = `<i class="left material-icons">notifications</i>${message}`;
    }

    toast = M.toast({html: `<span>${message}</span>
                            <button data-action="close" class="btn-flat toast-action white-text">
                              <i class="material-icons">close</i>
                            </button>`});
    toastCloseActionElement = toast.el.querySelector('.toast-action[data-action="close"]');
    toastCloseActionElement.addEventListener('click', () => {toast.dismiss();});
  }

  jobEventHandler(eventType, payload) {
    switch (eventType) {
      case 'init':
        break;
      case 'patch':
        this.jobPatch(payload);
        break;
      default:
        console.error(`[AppClient.jobEventHandler] Unknown event type: ${eventType}`);
        break;
    }
  }

  loadUser(userId) {
    if (userId in this.users) {return this.users[userId];}
    let user = new User();
    this.users[userId] = user;
    this.socket.on(`user_${userId}_init`, msg => user.init(msg));
    this.socket.on(`user_${userId}_patch`, msg => user.patch(msg));
    this.socket.emit('start_user_session', userId);
    return user;
  }

  jobPatch(patch) {
    if (this.users.self.data.settings.job_status_site_notifications === 'none') {return;}
    let jobStatusPatches = patch.filter(operation => operation.op === 'replace' && /^\/jobs\/(\d+)\/status$/.test(operation.path));
    for (let operation of jobStatusPatches) {
      let [match, jobId] = operation.path.match(/^\/jobs\/(\d+)\/status$/);
      if (this.users.self.data.settings.job_status_site_notifications === "end" && !['complete', 'failed'].includes(operation.value)) {continue;}
      this.flash(`[<a href="/jobs/${jobId}">${this.users.self.data.jobs[jobId].title}</a>] New status: ${operation.value}`, 'job');
    }
  }
}

class User {
  constructor() {
    this.data = undefined;
    this.eventListeners = {
      corpus: {
        addEventListener(listener, corpusId='*') {
          if (corpusId in this) {this[corpusId].push(listener);} else {this[corpusId] = [listener];}
        }
      },
      job: {
        addEventListener(listener, jobId='*') {
          if (jobId in this) {this[jobId].push(listener);} else {this[jobId] = [listener];}
        }
      },
      queryResult: {
        addEventListener(listener, queryResultId='*') {
          if (queryResultId in this) {this[queryResultId].push(listener);} else {this[queryResultId] = [listener];}
        }
      }
    };
  }

  init(data) {
    this.data = data;

    for (let [corpusId, eventListeners] of Object.entries(this.eventListeners.corpus)) {
      if (corpusId === '*') {
        for (let eventListener of eventListeners) {eventListener('init');}
      } else {
        if (corpusId in this.data.corpora) {
          for (let eventListener of eventListeners) {eventListener('init');}
        }
      }
    }

    for (let [jobId, eventListeners] of Object.entries(this.eventListeners.job)) {
      if (jobId === '*') {
        for (let eventListener of eventListeners) {eventListener('init');}
      } else {
        if (jobId in this.data.jobs) {
          for (let eventListener of eventListeners) {eventListener('init');}
        }
      }
    }

    for (let [queryResultId, eventListeners] of Object.entries(this.eventListeners.queryResult)) {
      if (queryResultId === '*') {
        for (let eventListener of eventListeners) {eventListener('init');}
      } else {
        if (queryResultId in this.data.query_results) {
          for (let eventListener of eventListeners) {eventListener('init');}
        }
      }
    }
  }

  patch(patch) {
    this.data = jsonpatch.apply_patch(this.data, patch);

    let corporaPatch = patch.filter(operation => operation.path.startsWith("/corpora"));
    if (corporaPatch.length > 0) {
      for (let [corpusId, eventListeners] of Object.entries(this.eventListeners.corpus)) {
        if (corpusId === '*') {
          for (let eventListener of eventListeners) {eventListener('patch', corporaPatch);}
        } else {
          let corpusPatch = corporaPatch.filter(operation => operation.path.startsWith(`/corpora/${corpusId}`));
          if (corpusPatch.length > 0) {
            for (let eventListener of eventListeners) {eventListener('patch', corpusPatch);}
          }
        }
      }
    }

    let jobsPatch = patch.filter(operation => operation.path.startsWith("/jobs"));
    if (jobsPatch.length > 0) {
      for (let [jobId, eventListeners] of Object.entries(this.eventListeners.job)) {
        if (jobId === '*') {
          for (let eventListener of eventListeners) {eventListener('patch', jobsPatch);}
        } else {
          let jobPatch = jobsPatch.filter(operation => operation.path.startsWith(`/jobs/${jobId}`));
          if (jobPatch.length > 0) {
            for (let eventListener of eventListeners) {eventListener('patch', jobPatch);}
          }
        }
      }
    }

    let queryResultsPatch = patch.filter(operation => operation.path.startsWith("/query_results"));
    if (queryResultsPatch.length > 0) {
      for (let [queryResultId, eventListeners] of Object.entries(this.eventListeners.queryResult)) {
        if (queryResultId === '*') {
          for (let eventListener of eventListeners) {eventListener('patch', queryResultsPatch);}
        } else {
          let queryResultPatch = queryResultsPatch.filter(operation => operation.path.startsWith(`/query_results/${queryResultId}`));
          if (queryResultPatch.length > 0) {
            for (let eventListener of eventListeners) {eventListener('patch', queryResultPatch);}
          }
        }
      }
    }
  }
}


/*
 * The nopaque object is used as a namespace for nopaque specific functions and
 * variables.
 */
var nopaque = {};

nopaque.Forms = {};
nopaque.Forms.init = function() {
  var abortRequestElement, parentElement, progressElement, progressModal,
      progressModalElement, request, submitElement;

  for (let form of document.querySelectorAll(".nopaque-submit-form")) {
    submitElement = form.querySelector('button[type="submit"]');
    submitElement.addEventListener("click", function() {
      for (let selectElement of form.querySelectorAll('select')) {
        if (selectElement.value === "") {
          parentElement = selectElement.closest(".input-field");
          parentElement.querySelector(".select-dropdown").classList.add("invalid");
          for (let helperTextElement of parentElement.querySelectorAll(".helper-text")) {
            helperTextElement.remove();
          }
          parentElement.insertAdjacentHTML("beforeend", `<span class="helper-text red-text">Please select an option.</span>`);
        }
      }
    });

    request = new XMLHttpRequest();
    if (form.dataset.hasOwnProperty("progressModal")) {
      progressModalElement = document.getElementById(form.dataset.progressModal);
      progressModal = M.Modal.getInstance(progressModalElement);
      progressModal.options.dismissible = false;
      abortRequestElement = progressModalElement.querySelector(".abort-request");
      abortRequestElement.addEventListener("click", function() {request.abort();});
      progressElement = progressModalElement.querySelector(".determinate");
    }
    form.addEventListener("submit", function(event) {
      event.preventDefault();
      var formData;

      formData = new FormData(form);
      // Initialize progress modal
      if (progressModalElement) {
        progressElement.style.width = "0%";
        progressModal.open();
      }
      request.open("POST", window.location.href);
      request.send(formData);
    });
    request.addEventListener("load", function(event) {
      var fieldElement;

      if (request.status === 201) {
        window.location.href = JSON.parse(this.responseText).redirect_url;
      }
      if (request.status === 400) {
        for (let [field, errors] of Object.entries(JSON.parse(this.responseText))) {
          fieldElement = form.querySelector(`input[name$="${field}"]`).closest(".input-field");
          for (let error of errors) {
            fieldElement.insertAdjacentHTML("beforeend", `<span class="helper-text red-text">${error}</span>`);
          }
        }
        if (progressModalElement) {
          progressModal.close();
        }
      }
      if (request.status === 500) {
        location.reload();
      }
    });
    if (progressModalElement) {
      request.upload.addEventListener("progress", function(event) {
        progressElement.style.width = Math.floor(100 * event.loaded / event.total).toString() + "%";
      });
    }
  }
}
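Editorial note: AppClient above presupposes a server-side counterpart that, on receiving 'start_user_session', joins the client to a per-user room and answers with a 'user_<id>_init' snapshot followed by incremental 'user_<id>_patch' events; the room and event names match those used by TaskRunner in app/tasks/__init__.py further down. The actual handler lives in app/events.py, which is not part of this excerpt. The following Flask-SocketIO sketch only illustrates that contract; the access check and the user.to_dict() serialization are assumptions, not code from the repository.

# Hypothetical sketch of the 'start_user_session' handshake expected by AppClient.
from flask_login import current_user
from flask_socketio import join_room

from app import socketio


@socketio.on('start_user_session')
def start_user_session(user_id):
    # Assumption: only the owner (or an admin) may subscribe to a user's stream.
    if current_user.id != int(user_id) and not current_user.is_administrator():
        return
    room = 'user_{}'.format(user_id)
    join_room(room)
    # Send the full state once; later changes arrive as JSON Patch documents
    # via 'user_<id>_patch', which is exactly what User.init()/User.patch() expect.
    socketio.emit('user_{}_init'.format(user_id), current_user.to_dict(), room=room)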
7
app/static/js/socket.io.min.js
vendored
Normal file
1
app/static/js/socket.io.min.js.map
Normal file
62
app/static/json_schema/combined_ners_tags.txt
Normal file
@ -0,0 +1,62 @@
"LOC",
"MISC",
"ORG",
"PER",
"EVENT",
"GPE",
"LOC",
"ORG",
"PERSON",
"PRODUCT",
"CARDINAL",
"DATE",
"EVENT",
"FAC",
"GPE",
"LANGUAGE",
"LAW",
"LOC",
"MONEY",
"NORP",
"ORDINAL",
"ORG",
"PERCENT",
"PERSON",
"PRODUCT",
"QUANTITY",
"TIME",
"WORK_OF_ART",
"LOC",
"MISC",
"ORG",
"PER",
"LOC",
"MISC",
"ORG",
"PER",
"LOC",
"MISC",
"ORG",
"PER",
"CARDINAL",
"DATE",
"EVENT",
"FAC",
"GPE",
"LANGUAGE",
"LAW",
"LOC",
"MONEY",
"NORP",
"ORDINAL",
"ORG",
"PERCENT",
"PERSON",
"PRODUCT",
"QUANTITY",
"TIME",
"WORK_OF_ART",
"LOC",
"MISC",
"ORG",
"PER"
329
app/static/json_schema/nopaque_cqi_py_results_schema.json
Normal file
@ -0,0 +1,329 @@
{
    "$schema": "http://json-schema.org/draft-06/schema#",
    "$ref": "#/definitions/NopaqueCQIPYResults",
    "definitions": {
        "NopaqueCQIPYResults": {
            "type": "object",
            "additionalProperties": false,
            "properties": {
                "matches": {
                    "type": "array",
                    "items": {
                        "$ref": "#/definitions/Match"
                    }
                },
                "cpos_lookup": {
                    "type": "object",
                    "additionalProperties": {
                        "$ref": "#/definitions/CposLookup"
                    }
                },
                "text_lookup": {
                    "type": "object",
                    "additionalProperties": {
                        "$ref": "#/definitions/CorpusAllText"
                    }
                },
                "match_count": {
                    "type": "integer"
                },
                "corpus_type": {
                    "type": "string"
                },
                "query": {
                    "type": "string"
                },
                "corpus_name": {
                    "type": "string"
                },
                "corpus_description": {
                    "type": "string"
                },
                "corpus_creation_date": {
                    "type": "string",
                    "format": "date-time"
                },
                "corpus_last_edited_date": {
                    "type": "string",
                    "format": "date-time"
                },
                "corpus_properties": {
                    "type": "array",
                    "items": {}
                },
                "corpus_size_tokens": {
                    "type": "integer"
                },
                "corpus_all_texts": {
                    "type": "object",
                    "additionalProperties": {
                        "$ref": "#/definitions/CorpusAllText"
                    }
                },
                "corpus_analysis_date": {
                    "type": "string",
                    "format": "date-time"
                },
                "corpus_cqi_py_protocol_version": {
                    "type": "string"
                },
                "corpus_cqi_py_package_version": {
                    "type": "string"
                },
                "corpus_cqpserver_version": {
                    "type": "string"
                },
                "fullContext": {
                    "type": "boolean"
                },
                "cpos_ranges": {
                    "type": "boolean"
                }
            },
            "required": [
                "corpus_all_texts",
                "corpus_analysis_date",
                "corpus_cqi_py_package_version",
                "corpus_cqi_py_protocol_version",
                "corpus_cqpserver_version",
                "corpus_creation_date",
                "corpus_description",
                "corpus_last_edited_date",
                "corpus_name",
                "corpus_properties",
                "corpus_size_tokens",
                "corpus_type",
                "cpos_lookup",
                "cpos_ranges",
                "match_count",
                "matches",
                "query",
                "text_lookup",
                "fullContext"
            ],
            "title": "NopaqueCQIPYResults"
        },
        "CorpusAllText": {
            "type": "object",
            "additionalProperties": false,
            "properties": {
                "address": {
                    "type": "string"
                },
                "author": {
                    "type": "string"
                },
                "booktitle": {
                    "type": "string"
                },
                "chapter": {
                    "type": "string"
                },
                "editor": {
                    "type": "string"
                },
                "institution": {
                    "type": "string"
                },
                "journal": {
                    "type": "string"
                },
                "pages": {
                    "type": "string"
                },
                "publisher": {
                    "type": "string"
                },
                "publishing_year": {
                    "type": "string",
                    "format": "integer"
                },
                "school": {
                    "type": "string"
                },
                "title": {
                    "type": "string"
                },
                "match_count": {
                    "type": "integer"
                }
            },
            "required": [
                "address",
                "author",
                "booktitle",
                "chapter",
                "editor",
                "institution",
                "journal",
                "pages",
                "publisher",
                "publishing_year",
                "school",
                "title"
            ],
            "title": "CorpusAllText"
        },
        "CposLookup": {
            "type": "object",
            "additionalProperties": false,
            "properties": {
                "word": {
                    "type": "string"
                },
                "lemma": {
                    "type": "string"
                },
                "simple_pos": {
                    "$ref": "#/definitions/SimplePos"
                },
                "pos": {
                    "type": "string"
                },
                "ner": {
                    "$ref": "#/definitions/Ner"
                },
                "text": {
                    "type": "integer"
                },
                "s": {
                    "type": "integer"
                }
            },
            "required": [
                "lemma",
                "ner",
                "pos",
                "s",
                "simple_pos",
                "text",
                "word"
            ],
            "title": "CposLookup"
        },
        "Match": {
            "type": "object",
            "additionalProperties": false,
            "properties": {
                "lc": {
                    "type": "array",
                    "items": {
                        "type": "integer"
                    }
                },
                "c": {
                    "type": "array",
                    "items": {
                        "type": "integer"
                    }
                },
                "rc": {
                    "type": "array",
                    "items": {
                        "type": "integer"
                    }
                }
            },
            "required": [
                "c",
                "lc",
                "rc"
            ],
            "title": "Match"
        },
        "Ner": {
            "type": "string",
            "enum": [
                "NULL",
                "LOC",
                "MISC",
                "ORG",
                "PER",
                "EVENT",
                "GPE",
                "LOC",
                "ORG",
                "PERSON",
                "PRODUCT",
                "CARDINAL",
                "DATE",
                "EVENT",
                "FAC",
                "GPE",
                "LANGUAGE",
                "LAW",
                "LOC",
                "MONEY",
                "NORP",
                "ORDINAL",
                "ORG",
                "PERCENT",
                "PERSON",
                "PRODUCT",
                "QUANTITY",
                "TIME",
                "WORK_OF_ART",
                "LOC",
                "MISC",
                "ORG",
                "PER",
                "LOC",
                "MISC",
                "ORG",
                "PER",
                "LOC",
                "MISC",
                "ORG",
                "PER",
                "CARDINAL",
                "DATE",
                "EVENT",
                "FAC",
                "GPE",
                "LANGUAGE",
                "LAW",
                "LOC",
                "MONEY",
                "NORP",
                "ORDINAL",
                "ORG",
                "PERCENT",
                "PERSON",
                "PRODUCT",
                "QUANTITY",
                "TIME",
                "WORK_OF_ART",
                "LOC",
                "MISC",
                "ORG",
                "PER"
            ],
            "title": "Ner"
        },
        "SimplePos": {
            "type": "string",
            "enum": [
                "ADJ",
                "ADP",
                "ADV",
                "AUX",
                "CONJ",
                "CCONJ",
                "DET",
                "INTJ",
                "NOUN",
                "NUM",
                "PART",
                "PRON",
                "PROPN",
                "PUNCT",
                "SCONJ",
                "SYM",
                "VERB",
                "X",
                "SPACE"
            ],
            "title": "SimplePos"
        }
    }
}
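Editorial note: an exported query result can be checked against this Draft 6 schema with the jsonschema package. The sketch below is illustrative only; 'results.json' is a made-up file name for a downloaded analysis export, not a path in the repository.

# Minimal validation sketch for a nopaque CQI.py results export.
import json

from jsonschema import Draft6Validator

with open('app/static/json_schema/nopaque_cqi_py_results_schema.json') as f:
    schema = json.load(f)
with open('results.json') as f:  # illustrative export file
    results = json.load(f)

for error in Draft6Validator(schema).iter_errors(results):
    print(list(error.path), error.message)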
54
app/tasks/__init__.py
Normal file
@ -0,0 +1,54 @@
from .corpus_utils import CheckCorporaMixin
from .job_utils import CheckJobsMixin
from .. import db, socketio
import docker


class TaskRunner(CheckCorporaMixin, CheckJobsMixin):
    def __init__(self):
        self.docker = docker.from_env()
        self._socketio_message_buffer = {}

    def run(self):
        self.check_corpora()
        self.check_jobs()
        db.session.commit()
        self.flush_socketio_messages()

    def buffer_socketio_message(self, event, payload, room,
                                msg_id=None, override_policy='replace'):
        if room not in self._socketio_message_buffer:
            self._socketio_message_buffer[room] = {}
        if event not in self._socketio_message_buffer[room]:
            self._socketio_message_buffer[room][event] = {}
        if msg_id is None:
            msg_id = len(self._socketio_message_buffer[room][event].keys())
        if override_policy == 'append':
            if msg_id in self._socketio_message_buffer[room][event]:
                # If the current message value isn't a list, convert it!
                if not isinstance(self._socketio_message_buffer[room][event][msg_id], list):  # noqa
                    self._socketio_message_buffer[room][event][msg_id] = [self._socketio_message_buffer[room][event][msg_id]]  # noqa
            else:
                self._socketio_message_buffer[room][event][msg_id] = []
            self._socketio_message_buffer[room][event][msg_id].append(payload)
        elif override_policy == 'replace':
            self._socketio_message_buffer[room][event][msg_id] = payload
        else:
            raise Exception('Unknown override policy: {}'.format(override_policy))  # noqa
        return msg_id

    def buffer_user_patch_operation(self, ressource, patch_operation):
        self.buffer_socketio_message('user_{}_patch'.format(ressource.user_id),
                                     patch_operation,
                                     'user_{}'.format(ressource.user_id),
                                     msg_id='_', override_policy='append')

    def clear_socketio_message_buffer(self):
        self._socketio_message_buffer = {}

    def flush_socketio_messages(self):
        for room in self._socketio_message_buffer:
            for event in self._socketio_message_buffer[room]:
                for message in self._socketio_message_buffer[room][event]:
                    socketio.emit(event, self._socketio_message_buffer[room][event][message], room=room)  # noqa
        self.clear_socketio_message_buffer()
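Editorial note: TaskRunner only bundles the two check mixins with a per-run Socket.IO message buffer; nothing in this file schedules it. One plausible way to drive it is a background loop inside an application context, sketched below. The scheduling location and the 3-second interval are assumptions, not taken from the repository.

# Hypothetical driver loop for TaskRunner; the real scheduling lives elsewhere.
import time

from app import create_app
from app.tasks import TaskRunner

app = create_app('development')  # config name is an assumption

with app.app_context():
    task_runner = TaskRunner()
    while True:
        # Check corpora and jobs, commit DB changes, then flush buffered
        # 'user_<id>_patch' events to the per-user Socket.IO rooms.
        task_runner.run()
        time.sleep(3)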
231
app/tasks/corpus_utils.py
Normal file
@ -0,0 +1,231 @@
from ..models import Corpus
import docker
import logging
import os
import shutil


class CheckCorporaMixin:
    def check_corpora(self):
        corpora = Corpus.query.all()
        queued_corpora = list(filter(lambda corpus: corpus.status == 'queued', corpora))  # noqa
        running_corpora = list(filter(lambda corpus: corpus.status == 'running', corpora))  # noqa
        start_analysis_corpora = list(filter(lambda corpus: corpus.status == 'start analysis', corpora))  # noqa
        analysing_corpora = list(filter(lambda corpus: corpus.status == 'analysing', corpora))  # noqa
        stop_analysis_corpora = list(filter(lambda corpus: corpus.status == 'stop analysis', corpora))  # noqa
        submitted_corpora = list(filter(lambda corpus: corpus.status == 'submitted', corpora))  # noqa
        for corpus in submitted_corpora:
            self.create_build_corpus_service(corpus)
        for corpus in queued_corpora + running_corpora:
            self.checkout_build_corpus_service(corpus)
        for corpus in start_analysis_corpora:
            self.create_cqpserver_container(corpus)
        for corpus in analysing_corpora:
            self.checkout_analysing_corpus_container(corpus)
        for corpus in stop_analysis_corpora:
            self.remove_cqpserver_container(corpus)

    def create_build_corpus_service(self, corpus):
        corpus_data_dir = os.path.join(corpus.path, 'data')
        shutil.rmtree(corpus_data_dir, ignore_errors=True)
        os.mkdir(corpus_data_dir)
        corpus_registry_dir = os.path.join(corpus.path, 'registry')
        shutil.rmtree(corpus_registry_dir, ignore_errors=True)
        os.mkdir(corpus_registry_dir)
        corpus_file = os.path.join(corpus.path, 'merged', 'corpus.vrt')
        service_kwargs = {
            'command': 'docker-entrypoint.sh build-corpus',
            'constraints': ['node.role==worker'],
            'labels': {'origin': 'nopaque',
                       'type': 'corpus.build',
                       'corpus_id': str(corpus.id)},
            'mounts': [corpus_file + ':/root/files/corpus.vrt:ro',
                       corpus_data_dir + ':/corpora/data:rw',
                       corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
            'name': 'build-corpus_{}'.format(corpus.id),
            'restart_policy': docker.types.RestartPolicy()
        }
        service_image = \
            'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
        try:
            self.docker.services.create(service_image, **service_kwargs)
        except docker.errors.APIError as e:
            logging.error(
                'Create "{}" service raised '.format(service_kwargs['name'])
                + '"docker.errors.APIError" The server returned an error. '
                + 'Details: {}'.format(e)
            )
        else:
            corpus.status = 'queued'
            patch_operation = {
                'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}
            self.buffer_user_patch_operation(corpus, patch_operation)

    def checkout_build_corpus_service(self, corpus):
        service_name = 'build-corpus_{}'.format(corpus.id)
        try:
            service = self.docker.services.get(service_name)
        except docker.errors.NotFound:
            logging.error(
                'Get "{}" service raised '.format(service_name)
                + '"docker.errors.NotFound" The service does not exist. '
                + '(corpus.status: {} -> failed)'.format(corpus.status)
            )
            corpus.status = 'failed'
            patch_operation = {
                'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}
            self.buffer_user_patch_operation(corpus, patch_operation)
        except docker.errors.APIError as e:
            logging.error(
                'Get "{}" service raised '.format(service_name)
                + '"docker.errors.APIError" The server returned an error. '
                + 'Details: {}'.format(e)
            )
        except docker.errors.InvalidVersion:
            logging.error(
                'Get "{}" service raised '.format(service_name)
                + '"docker.errors.InvalidVersion" One of the arguments is '
                + 'not supported with the current API version.'
            )
        else:
            service_tasks = service.tasks()
            if not service_tasks:
                return
            task_state = service_tasks[0].get('Status').get('State')
            if corpus.status == 'queued' and task_state != 'pending':
                corpus.status = 'running'
                patch_operation = {
                    'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}
                self.buffer_user_patch_operation(corpus, patch_operation)
            elif (corpus.status == 'running'
                  and task_state in ['complete', 'failed']):
                try:
                    service.remove()
                except docker.errors.APIError as e:
                    logging.error(
                        'Remove "{}" service raised '.format(service_name)
                        + '"docker.errors.APIError" The server returned an error. '
                        + 'Details: {}'.format(e)
                    )
                    return
                else:
                    corpus.status = 'prepared' if task_state == 'complete' \
                        else 'failed'
                    patch_operation = {
                        'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}
                    self.buffer_user_patch_operation(corpus, patch_operation)

    def create_cqpserver_container(self, corpus):
        corpus_data_dir = os.path.join(corpus.path, 'data')
        corpus_registry_dir = os.path.join(corpus.path, 'registry')
        container_kwargs = {
            'command': 'cqpserver',
            'detach': True,
            'volumes': [corpus_data_dir + ':/corpora/data:rw',
                        corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
            'name': 'cqpserver_{}'.format(corpus.id),
            'network': 'nopaque_default'
        }
        container_image = \
            'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
        # Check if a cqpserver container already exists. If this is the case,
        # remove it and create a new one
        try:
            container = self.docker.containers.get(container_kwargs['name'])
        except docker.errors.NotFound:
            pass
        except docker.errors.APIError as e:
            logging.error(
                'Get "{}" container raised '.format(container_kwargs['name'])
                + '"docker.errors.APIError" The server returned an error. '
                + 'Details: {}'.format(e)
            )
            return
        else:
            try:
                container.remove(force=True)
            except docker.errors.APIError as e:
                logging.error(
                    'Remove "{}" container raised '.format(
                        container_kwargs['name'])
                    + '"docker.errors.APIError" The server returned an error. '
                    + 'Details: {}'.format(e)
                )
                return
        try:
            self.docker.containers.run(container_image, **container_kwargs)
        except docker.errors.ContainerError:
            # This case should not occur, because detach is True.
            logging.error(
                'Run "{}" container raised '.format(container_kwargs['name'])
                + '"docker.errors.ContainerError" The container exits with a '
                + 'non-zero exit code and detach is False.'
            )
            corpus.status = 'failed'
            patch_operation = {
                'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}
            self.buffer_user_patch_operation(corpus, patch_operation)
        except docker.errors.ImageNotFound:
            logging.error(
                'Run "{}" container raised '.format(container_kwargs['name'])
                + '"docker.errors.ImageNotFound" The specified image does not '
                + 'exist.'
            )
            corpus.status = 'failed'
            patch_operation = {
                'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}
            self.buffer_user_patch_operation(corpus, patch_operation)
        except docker.errors.APIError as e:
            logging.error(
                'Run "{}" container raised '.format(container_kwargs['name'])
                + '"docker.errors.APIError" The server returned an error. '
                + 'Details: {}'.format(e)
            )
        else:
            corpus.status = 'analysing'
            patch_operation = {
                'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}
            self.buffer_user_patch_operation(corpus, patch_operation)

    def checkout_analysing_corpus_container(self, corpus):
        container_name = 'cqpserver_{}'.format(corpus.id)
        try:
            self.docker.containers.get(container_name)
        except docker.errors.NotFound:
            logging.error('Could not find "{}" but the corpus state is "analysing".'.format(container_name))  # noqa
            corpus.status = 'prepared'
except docker.errors.APIError as e:
|
||||
logging.error(
|
||||
'Get "{}" container raised '.format(container_name)
|
||||
+ '"docker.errors.APIError" The server returned an error. '
|
||||
+ 'Details: {}'.format(e)
|
||||
)
|
||||
return
|
||||
|
||||
def remove_cqpserver_container(self, corpus):
|
||||
container_name = 'cqpserver_{}'.format(corpus.id)
|
||||
try:
|
||||
container = self.docker.containers.get(container_name)
|
||||
except docker.errors.NotFound:
|
||||
pass
|
||||
except docker.errors.APIError as e:
|
||||
logging.error(
|
||||
'Get "{}" container raised '.format(container_name)
|
||||
+ '"docker.errors.APIError" The server returned an error. '
|
||||
+ 'Details: {}'.format(e)
|
||||
)
|
||||
return
|
||||
else:
|
||||
try:
|
||||
container.remove(force=True)
|
||||
except docker.errors.APIError as e:
|
||||
logging.error(
|
||||
'Remove "{}" container raised '.format(container_name)
|
||||
+ '"docker.errors.APIError" The server returned an error. '
|
||||
+ 'Details: {}'.format(e)
|
||||
)
|
||||
return
|
||||
corpus.status = 'prepared'
|
||||
patch_operation = {
|
||||
'op': 'replace', 'path': '/corpora/{}/status'.format(corpus.id), 'value': corpus.status}
|
||||
self.buffer_user_patch_operation(corpus, patch_operation)
|
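Editorial note: for readability, the corpus status lifecycle that check_corpora enacts can be summarized as below. This mapping is an informational sketch derived from the methods above; it is not a constant in the codebase.

# Corpus status transitions driven by CheckCorporaMixin (sketch only).
CORPUS_STATUS_TRANSITIONS = {
    'submitted': 'queued',               # create_build_corpus_service
    'queued': 'running',                 # checkout_build_corpus_service, task no longer pending
    'running': ('prepared', 'failed'),   # checkout_build_corpus_service, build task finished
    'start analysis': 'analysing',       # create_cqpserver_container
    'analysing': 'prepared',             # checkout_analysing_corpus_container, container gone
    'stop analysis': 'prepared',         # remove_cqpserver_container
}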
206
app/tasks/job_utils.py
Normal file
@ -0,0 +1,206 @@
from datetime import datetime
from werkzeug.utils import secure_filename
from .. import db, mail
from ..email import create_message
from ..models import Job, JobResult
import docker
import logging
import json
import os


# TODO: Integrate the service_settings into app/services/__init__.py
service_settings = {
    'file-setup': {
        'default_args': ' --mem-mb 2048 --n-cores 2',
        'ressources': docker.types.Resources(cpu_reservation=2 * (10 ** 9),
                                             mem_reservation=2048 * (10 ** 6))
    },
    'nlp': {
        'default_args': ' --mem-mb 2048 --n-cores 2',
        'ressources': docker.types.Resources(cpu_reservation=2 * (10 ** 9),
                                             mem_reservation=2048 * (10 ** 6))
    },
    'ocr': {
        'default_args': ' --mem-mb 4096 --n-cores 4',
        'ressources': docker.types.Resources(cpu_reservation=4 * (10 ** 9),
                                             mem_reservation=4096 * (10 ** 6))
    }
}


class CheckJobsMixin:
    def check_jobs(self):
        jobs = Job.query.all()
        canceling_jobs = list(filter(lambda job: job.status == 'canceling', jobs))  # noqa
        queued_jobs = list(filter(lambda job: job.status == 'queued', jobs))
        running_jobs = list(filter(lambda job: job.status == 'running', jobs))
        submitted_jobs = list(filter(lambda job: job.status == 'submitted', jobs))  # noqa
        for job in submitted_jobs:
            self.create_job_service(job)
        for job in queued_jobs + running_jobs:
            self.checkout_job_service(job)
        for job in canceling_jobs:
            self.remove_job_service(job)

    def create_job_service(self, job):
        cmd = job.service
        cmd += ' -i /input'
        cmd += ' -o /output'
        cmd += ' --log-dir /input'
        cmd += ' --zip [{}]_{}'.format(job.service, secure_filename(job.title))
        cmd += service_settings[job.service]['default_args']
        cmd += ' ' + ' '.join(json.loads(job.service_args))
        # Setup input mount
        input_mount_src = job.path
        input_mount_dest = os.path.abspath('/input')
        if job.service == 'file-setup':
            input_mount_dest = os.path.join(input_mount_dest, secure_filename(job.title))  # noqa
        input_mount = '{}:{}:rw'.format(input_mount_src, input_mount_dest)
        # Setup output mount
        output_mount_src = os.path.join(job.path, 'output')
        output_mount_dest = os.path.abspath('/output')
        os.makedirs(output_mount_src)
        output_mount = '{}:{}:rw'.format(output_mount_src, output_mount_dest)
        service_kwargs = {'command': cmd,
                          'constraints': ['node.role==worker'],
                          'labels': {'origin': 'nopaque',
                                     'type': 'job',
                                     'job_id': str(job.id)},
                          'mounts': [input_mount, output_mount],
                          'name': 'job_{}'.format(job.id),
                          'resources': service_settings[job.service]['ressources'],  # noqa
                          'restart_policy': docker.types.RestartPolicy()}
        service_image = 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/{}:{}'.format(job.service, job.service_version)  # noqa
        try:
            self.docker.services.create(service_image, **service_kwargs)
        except docker.errors.APIError as e:
            logging.error(
                'Create "{}" service raised '.format(service_kwargs['name'])
                + '"docker.errors.APIError" The server returned an error. '
                + 'Details: {}'.format(e)
            )
            return
        else:
            job.status = 'queued'
            patch_operation = {'op': 'replace', 'path': '/jobs/{}/status'.format(job.id), 'value': job.status}  # noqa
            self.buffer_user_patch_operation(job, patch_operation)
        finally:
            self.send_job_notification(job)

    def checkout_job_service(self, job):
        service_name = 'job_{}'.format(job.id)
        try:
            service = self.docker.services.get(service_name)
        except docker.errors.NotFound:
            logging.error('Get "{}" service raised '.format(service_name)
                          + '"docker.errors.NotFound" The service does not exist. '
                          + '(job.status: {} -> failed)'.format(job.status))
            job.status = 'failed'
            patch_operation = {'op': 'replace', 'path': '/jobs/{}/status'.format(job.id), 'value': job.status}  # noqa
            self.buffer_user_patch_operation(job, patch_operation)
        except docker.errors.APIError as e:
            logging.error(
                'Get "{}" service raised '.format(service_name)
                + '"docker.errors.APIError" The server returned an error. '
                + 'Details: {}'.format(e)
            )
            return
        except docker.errors.InvalidVersion:
            logging.error(
                'Get "{}" service raised '.format(service_name)
                + '"docker.errors.InvalidVersion" One of the arguments is '
                + 'not supported with the current API version.'
            )
            return
        else:
            service_tasks = service.tasks()
            if not service_tasks:
                return
            task_state = service_tasks[0].get('Status').get('State')
            if job.status == 'queued' and task_state != 'pending':
                job.status = 'running'
                patch_operation = {'op': 'replace', 'path': '/jobs/{}/status'.format(job.id), 'value': job.status}  # noqa
                self.buffer_user_patch_operation(job, patch_operation)
            elif job.status == 'running' and task_state in ['complete', 'failed']:
                try:
                    service.remove()
                except docker.errors.APIError as e:
                    logging.error(
                        'Remove "{}" service raised '.format(service_name)
                        + '"docker.errors.APIError" The server returned an error. '  # noqa
                        + 'Details: {}'.format(e)
                    )
                    return
                else:
                    if task_state == 'complete':
                        results_dir = os.path.join(job.path, 'output')
                        result_files = filter(lambda x: x.endswith('.zip'),
                                              os.listdir(results_dir))
                        for result_file in result_files:
                            job_result = JobResult(filename=result_file, job=job)  # noqa
                            db.session.add(job_result)
                            db.session.flush()
                            db.session.refresh(job_result)
                            patch_operation = {'op': 'add', 'path': '/jobs/{}/results/{}'.format(job.id, job_result.id), 'value': job_result.to_dict()}  # noqa
                            self.buffer_user_patch_operation(job, patch_operation)  # noqa
                    job.end_date = datetime.utcnow()
                    patch_operation = {'op': 'replace', 'path': '/jobs/{}/end_date'.format(job.id), 'value': job.end_date.timestamp()}  # noqa
                    self.buffer_user_patch_operation(job, patch_operation)
                    job.status = task_state
                    patch_operation = {'op': 'replace', 'path': '/jobs/{}/status'.format(job.id), 'value': job.status}  # noqa
                    self.buffer_user_patch_operation(job, patch_operation)
        finally:
            self.send_job_notification(job)

    def remove_job_service(self, job):
        service_name = 'job_{}'.format(job.id)
        try:
            service = self.docker.services.get(service_name)
        except docker.errors.NotFound:
            job.status = 'canceled'
            patch_operation = {'op': 'replace', 'path': '/jobs/{}/status'.format(job.id), 'value': job.status}  # noqa
            self.buffer_user_patch_operation(job, patch_operation)
        except docker.errors.APIError as e:
            logging.error(
                'Get "{}" service raised '.format(service_name)
                + '"docker.errors.APIError" The server returned an error. '
                + 'Details: {}'.format(e)
            )
            return
        except docker.errors.InvalidVersion:
            logging.error(
                'Get "{}" service raised '.format(service_name)
                + '"docker.errors.InvalidVersion" One of the arguments is '
                + 'not supported with the current API version.'
            )
            return
        else:
            try:
                service.update(mounts=None)
            except docker.errors.APIError as e:
                logging.error(
                    'Update "{}" service raised '.format(service_name)
                    + '"docker.errors.APIError" The server returned an error. '
                    + 'Details: {}'.format(e)
                )
                return
            try:
                service.remove()
            except docker.errors.APIError as e:
                logging.error(
                    'Remove "{}" service raised '.format(service_name)
                    + '"docker.errors.APIError" The server returned an error. '
                    + 'Details: {}'.format(e)
                )

    def send_job_notification(self, job):
        if job.creator.setting_job_status_mail_notifications == 'none':
            return
        if (job.creator.setting_job_status_mail_notifications == 'end'
                and job.status not in ['complete', 'failed']):
            return
        msg = create_message(job.creator.email,
                             'Status update for your Job "{}"'.format(job.title),  # noqa
                             'tasks/email/notification', job=job)
        mail.send(msg)
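Editorial note: each status change above is buffered as a single RFC 6902 (JSON Patch) operation and eventually applied client-side by User.patch() via jsonpatch.apply_patch. The same round trip can be reproduced with the Python jsonpatch package; in the sketch below the job id, title, and initial document are invented values for illustration.

# Illustrative application of a buffered job patch with the jsonpatch package.
import jsonpatch

user_data = {'jobs': {'1': {'status': 'running', 'end_date': None, 'title': 'My OCR job'}}}
patch = [
    {'op': 'replace', 'path': '/jobs/1/status', 'value': 'complete'},
    {'op': 'replace', 'path': '/jobs/1/end_date', 'value': 1600000000.0},
]
user_data = jsonpatch.apply_patch(user_data, patch)
assert user_data['jobs']['1']['status'] == 'complete'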