Mirror of https://gitlab.ub.uni-bielefeld.de/sfb1288inf/nopaque.git, synced 2025-07-04 03:33:17 +00:00

Compare commits: 289a551122...access-pip (3 commits)

Commits in this comparison (SHA1): 2c709e65d0, 71c0ddf515, 5c395d1e06
@@ -5,9 +5,9 @@
!app
!migrations
!tests
!.flaskenv
!boot.sh
!config.py
!docker-nopaque-entrypoint.sh
!nopaque.py
!requirements.txt
!requirements.freezed.txt
!wsgi.py
.env.tpl (22 changed lines)
@@ -1,20 +1,32 @@
##############################################################################
# Environment variables used by Docker Compose config files. #
# Variables for use in Docker Compose YAML files #
##############################################################################
# HINT: Use this bash command `id -u`
# NOTE: 0 (= root user) is not allowed
HOST_UID=

# HINT: Use this bash command `id -g`
# NOTE: 0 (= root group) is not allowed
HOST_GID=

# HINT: Use this bash command `getent group docker | cut -d: -f3`
HOST_DOCKER_GID=

# DEFAULT: nopaque
NOPAQUE_DOCKER_NETWORK_NAME=nopaque
# DOCKER_DEFAULT_NETWORK_NAME=

# DEFAULT: ./volumes/db/data
# NOTE: Use `.` as <project-basedir>
# DOCKER_DB_SERVICE_DATA_VOLUME_SOURCE_PATH=

# DEFAULT: ./volumes/mq/data
# NOTE: Use `.` as <project-basedir>
# DOCKER_MQ_SERVICE_DATA_VOLUME_SOURCE_PATH=

# NOTE: This must be a network share and it must be available on all
# Docker Swarm nodes, mounted to the same path.
HOST_NOPAQUE_DATA_PATH=/mnt/nopaque
# Docker Swarm nodes, mounted to the same path with the same
# user and group ownership.
DOCKER_NOPAQUE_SERVICE_DATA_VOLUME_SOURCE_PATH=

# DEFAULT: ./volumes/nopaque/logs
# NOTE: Use `.` as <project-basedir>
# DOCKER_NOPAQUE_SERVICE_LOGS_VOLUME_SOURCE_PATH=.
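The HINT comments in the template above map to concrete values on the Docker host. As a minimal sketch only (not part of this repository, assuming a Linux host that has a "docker" group), the same three values can be read from Python:

import grp
import os

# Same values as `id -u` and `id -g` for the current user.
print(f'HOST_UID={os.getuid()}')
print(f'HOST_GID={os.getgid()}')

# Same value as `getent group docker | cut -d: -f3`;
# grp.getgrnam raises KeyError if the host has no "docker" group.
docker_gid = grp.getgrnam('docker').gr_gid
print(f'HOST_DOCKER_GID={docker_gid}')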
.gitignore (vendored, 2 changed lines)
@@ -2,6 +2,8 @@
app/static/gen/
volumes/
docker-compose.override.yml
logs/
!logs/dummy
*.env

*.pjentsch-testing
.vscode/settings.json (vendored, 18 changed lines)
@@ -1,7 +1,19 @@
{
"editor.rulers": [79],
"editor.tabSize": 2,
"files.insertFinalNewline": true,
"files.trimFinalNewlines": true,
"files.trimTrailingWhitespace": true
"[css]": {
"editor.tabSize": 2
},
"[html]": {
"editor.tabSize": 2
},
"[javascript]": {
"editor.tabSize": 2
},
"[jinja-html]": {
"editor.tabSize": 2
},
"[scss]": {
"editor.tabSize": 2
}
}
Dockerfile (11 changed lines)
@@ -35,17 +35,20 @@ ENV PATH="${NOPAQUE_PYTHON3_VENV_PATH}/bin:${PATH}"

# Install Python dependencies
COPY --chown=nopaque:nopaque requirements.freezed.txt requirements.freezed.txt
RUN python3 -m pip install --requirement requirements.freezed.txt \
&& rm requirements.freezed.txt
COPY --chown=nopaque:nopaque requirements.txt requirements.txt
RUN python3 -m pip install --requirement requirements.txt \
&& rm requirements.txt

# Install the application
COPY docker-nopaque-entrypoint.sh /usr/local/bin/

COPY --chown=nopaque:nopaque app app
COPY --chown=nopaque:nopaque migrations migrations
COPY --chown=nopaque:nopaque tests tests
COPY --chown=nopaque:nopaque boot.sh config.py wsgi.py ./
COPY --chown=nopaque:nopaque .flaskenv boot.sh config.py nopaque.py requirements.txt ./

RUN mkdir logs

EXPOSE 5000
@@ -35,7 +35,7 @@ username@hostname:~$ sudo mount --types cifs --options gid=${USER},password=nopa
# Clone the nopaque repository
username@hostname:~$ git clone https://gitlab.ub.uni-bielefeld.de/sfb1288inf/nopaque.git
# Create data directories
username@hostname:~$ mkdir volumes/{db,mq}
username@hostname:~$ mkdir data/{db,logs,mq}
username@hostname:~$ cp db.env.tpl db.env
username@hostname:~$ cp .env.tpl .env
# Fill out the variables within these files.
app/__init__.py (154 changed lines)
@@ -2,9 +2,9 @@ from apifairy import APIFairy
from config import Config
from docker import DockerClient
from flask import Flask
from flask.logging import default_handler
from flask_apscheduler import APScheduler
from flask_assets import Environment
from flask_breadcrumbs import Breadcrumbs, default_breadcrumb_root
from flask_login import LoginManager
from flask_mail import Mail
from flask_marshmallow import Marshmallow

@@ -13,142 +13,98 @@ from flask_paranoid import Paranoid
from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy
from flask_hashids import Hashids
from logging import Formatter, StreamHandler
from werkzeug.middleware.proxy_fix import ProxyFix

docker_client = DockerClient.from_env()

apifairy = APIFairy()
assets = Environment()
breadcrumbs = Breadcrumbs()
db = SQLAlchemy()
docker_client = DockerClient()
hashids = Hashids()
login = LoginManager()
login.login_view = 'auth.login'
login.login_message = 'Please log in to access this page.'
ma = Marshmallow()
mail = Mail()
migrate = Migrate(compare_type=True)
paranoid = Paranoid()
paranoid.redirect_view = '/'
scheduler = APScheduler()
socketio = SocketIO()

def create_app(config: Config = Config) -> Flask:
''' Creates an initialized Flask object. '''

''' Creates an initialized Flask (WSGI Application) object. '''
app = Flask(__name__)
app.config.from_object(config)

# region Logging
log_formatter = Formatter(
fmt=app.config['NOPAQUE_LOG_FORMAT'],
datefmt=app.config['NOPAQUE_LOG_DATE_FORMAT']
)

log_handler = StreamHandler()
log_handler.setFormatter(log_formatter)
log_handler.setLevel(app.config['NOPAQUE_LOG_LEVEL'])

app.logger.setLevel('DEBUG')
app.logger.removeHandler(default_handler)
app.logger.addHandler(log_handler)
# endregion Logging

# region Middlewares
if app.config['NOPAQUE_PROXY_FIX_ENABLED']:
app.wsgi_app = ProxyFix(
app.wsgi_app,
x_for=app.config['NOPAQUE_PROXY_FIX_X_FOR'],
x_host=app.config['NOPAQUE_PROXY_FIX_X_HOST'],
x_port=app.config['NOPAQUE_PROXY_FIX_X_PORT'],
x_prefix=app.config['NOPAQUE_PROXY_FIX_X_PREFIX'],
x_proto=app.config['NOPAQUE_PROXY_FIX_X_PROTO']
)
# endregion Middlewares

# region Extensions
config.init_app(app)
docker_client.login(
app.config['NOPAQUE_DOCKER_REGISTRY_USERNAME'],
password=app.config['NOPAQUE_DOCKER_REGISTRY_PASSWORD'],
registry=app.config['NOPAQUE_DOCKER_REGISTRY']
)

from .models import AnonymousUser, User

apifairy.init_app(app)
assets.init_app(app)
breadcrumbs.init_app(app)
db.init_app(app)
hashids.init_app(app)
login.init_app(app)
login.anonymous_user = AnonymousUser
login.login_view = 'auth.login'
login.user_loader(lambda user_id: User.query.get(int(user_id)))
ma.init_app(app)
mail.init_app(app)
migrate.init_app(app, db)
paranoid.init_app(app)
paranoid.redirect_view = '/'
scheduler.init_app(app)
socketio.init_app(app, message_queue=app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI'])
# endregion Extensions
socketio.init_app(app, message_queue=app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI']) # noqa

# region Blueprints
from .blueprints.admin import bp as admin_blueprint
app.register_blueprint(admin_blueprint, url_prefix='/admin')

from .blueprints.api import bp as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/api')

from .blueprints.auth import bp as auth_blueprint
app.register_blueprint(auth_blueprint)

from .blueprints.contributions import bp as contributions_blueprint
app.register_blueprint(contributions_blueprint, url_prefix='/contributions')

from .blueprints.corpora import bp as corpora_blueprint
app.register_blueprint(corpora_blueprint, cli_group='corpus', url_prefix='/corpora')

from .blueprints.errors import bp as errors_bp
app.register_blueprint(errors_bp)

from .blueprints.jobs import bp as jobs_blueprint
app.register_blueprint(jobs_blueprint, url_prefix='/jobs')

from .blueprints.main import bp as main_blueprint
app.register_blueprint(main_blueprint, cli_group=None)

from .blueprints.services import bp as services_blueprint
app.register_blueprint(services_blueprint, url_prefix='/services')

from .blueprints.settings import bp as settings_blueprint
app.register_blueprint(settings_blueprint, url_prefix='/settings')

from .blueprints.users import bp as users_blueprint
app.register_blueprint(users_blueprint, cli_group='user', url_prefix='/users')

from .blueprints.workshops import bp as workshops_blueprint
app.register_blueprint(workshops_blueprint, url_prefix='/workshops')
# endregion Blueprints

# region SocketIO Namespaces
from .namespaces.cqi_over_sio import CQiOverSocketIONamespace
socketio.on_namespace(CQiOverSocketIONamespace('/cqi_over_sio'))

from .namespaces.users import UsersNamespace
socketio.on_namespace(UsersNamespace('/users'))
# endregion SocketIO Namespaces

# region Database event Listeners
from .models.event_listeners import register_event_listeners
register_event_listeners()
# endregion Database event Listeners

# region Add scheduler jobs
if app.config['NOPAQUE_IS_PRIMARY_INSTANCE']:
from .jobs import handle_corpora
scheduler.add_job('handle_corpora', handle_corpora, seconds=3, trigger='interval')
from .admin import bp as admin_blueprint
default_breadcrumb_root(admin_blueprint, '.admin')
app.register_blueprint(admin_blueprint, url_prefix='/admin')

from .jobs import handle_jobs
scheduler.add_job('handle_jobs', handle_jobs, seconds=3, trigger='interval')
# endregion Add scheduler jobs
from .api import bp as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/api')

from .auth import bp as auth_blueprint
default_breadcrumb_root(auth_blueprint, '.')
app.register_blueprint(auth_blueprint)

from .contributions import bp as contributions_blueprint
default_breadcrumb_root(contributions_blueprint, '.contributions')
app.register_blueprint(contributions_blueprint, url_prefix='/contributions')

from .corpora import bp as corpora_blueprint
from .corpora.cqi_over_sio import CQiNamespace
default_breadcrumb_root(corpora_blueprint, '.corpora')
app.register_blueprint(corpora_blueprint, cli_group='corpus', url_prefix='/corpora')
socketio.on_namespace(CQiNamespace('/cqi_over_sio'))

from .errors import bp as errors_bp
app.register_blueprint(errors_bp)

from .jobs import bp as jobs_blueprint
default_breadcrumb_root(jobs_blueprint, '.jobs')
app.register_blueprint(jobs_blueprint, url_prefix='/jobs')

from .main import bp as main_blueprint
default_breadcrumb_root(main_blueprint, '.')
app.register_blueprint(main_blueprint, cli_group=None)

from .services import bp as services_blueprint
default_breadcrumb_root(services_blueprint, '.services')
app.register_blueprint(services_blueprint, url_prefix='/services')

from .settings import bp as settings_blueprint
default_breadcrumb_root(settings_blueprint, '.settings')
app.register_blueprint(settings_blueprint, url_prefix='/settings')

from .users import bp as users_blueprint
default_breadcrumb_root(users_blueprint, '.users')
app.register_blueprint(users_blueprint, cli_group='user', url_prefix='/users')

from .workshops import bp as workshops_blueprint
app.register_blueprint(workshops_blueprint, url_prefix='/workshops')

return app
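The create_app() factory above is what the wsgi.py and nopaque.py files copied by the Dockerfile would import; their contents are not part of this comparison. As a hedged sketch only (module layout and port are assumptions, not taken from the diff), a typical entry point would look roughly like this:

# Hypothetical entry point; the repository's real wsgi.py / nopaque.py may differ.
from app import create_app, socketio

app = create_app()

if __name__ == '__main__':
    # flask-socketio's run() wraps app.run() and also serves the Socket.IO
    # namespaces registered in create_app(); port 5000 matches EXPOSE 5000.
    socketio.run(app, host='0.0.0.0', port=5000)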
@@ -1,6 +1,6 @@
from flask import abort, request
from app.decorators import content_negotiation
from app import db
from app.decorators import content_negotiation
from app.models import User
from . import bp
@@ -1,7 +1,8 @@
from flask import abort, flash, redirect, render_template, url_for
from flask_breadcrumbs import register_breadcrumb
from app import db, hashids
from app.models import Avatar, Corpus, Role, User
from app.blueprints.users.settings.forms import (
from app.users.settings.forms import (
UpdateAvatarForm,
UpdatePasswordForm,
UpdateNotificationsForm,

@@ -10,9 +11,14 @@ from app.blueprints.users.settings.forms import (
)
from . import bp
from .forms import UpdateUserForm
from app.users.utils import (
user_endpoint_arguments_constructor as user_eac,
user_dynamic_list_constructor as user_dlc
)

@bp.route('')
@register_breadcrumb(bp, '.', '<i class="material-icons left">admin_panel_settings</i>Administration')
def admin():
return render_template(
'admin/admin.html.j2',

@@ -21,6 +27,7 @@ def admin():

@bp.route('/corpora')
@register_breadcrumb(bp, '.corpora', 'Corpora')
def corpora():
corpora = Corpus.query.all()
return render_template(

@@ -31,6 +38,7 @@ def corpora():

@bp.route('/users')
@register_breadcrumb(bp, '.users', '<i class="material-icons left">group</i>Users')
def users():
users = User.query.all()
return render_template(

@@ -41,6 +49,7 @@ def users():

@bp.route('/users/<hashid:user_id>')
@register_breadcrumb(bp, '.users.entity', '', dynamic_list_constructor=user_dlc)
def user(user_id):
user = User.query.get_or_404(user_id)
corpora = Corpus.query.filter(Corpus.user == user).all()

@@ -53,6 +62,7 @@ def user(user_id):

@bp.route('/users/<hashid:user_id>/settings', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.users.entity.settings', '<i class="material-icons left">settings</i>Settings')
def user_settings(user_id):
user = User.query.get_or_404(user_id)
update_account_information_form = UpdateAccountInformationForm(user)
@@ -5,8 +5,8 @@ from flask import abort, Blueprint
from werkzeug.exceptions import InternalServerError
from app import db, hashids
from app.models import Job, JobInput, JobStatus, TesseractOCRPipelineModel
from .auth import auth_error_responses, token_auth
from .schemas import EmptySchema, JobSchema, SpaCyNLPPipelineJobSchema, TesseractOCRPipelineJobSchema, TesseractOCRPipelineModelSchema
from .auth import auth_error_responses, token_auth

bp = Blueprint('jobs', __name__)

@@ -77,7 +77,7 @@ def delete_job(job_id):
job = Job.query.get(job_id)
if job is None:
abort(404)
if not (job.user == current_user or current_user.is_administrator):
if not (job.user == current_user or current_user.is_administrator()):
abort(403)
try:
job.delete()

@@ -97,6 +97,6 @@ def get_job(job_id):
job = Job.query.get(job_id)
if job is None:
abort(404)
if not (job.user == current_user or current_user.is_administrator):
if not (job.user == current_user or current_user.is_administrator()):
abort(403)
return job
@@ -10,7 +10,7 @@ from app.models import (
User,
UserSettingJobStatusMailNotificationLevel
)
from app.blueprints.services import SERVICES
from app.services import SERVICES
@@ -3,11 +3,11 @@ from apifairy import authenticate, body, response
from apifairy.decorators import other_responses
from flask import abort, Blueprint
from werkzeug.exceptions import InternalServerError
from app.email import create_message, send
from app import db
from app.email import create_message, send
from app.models import User
from .auth import auth_error_responses, token_auth
from .schemas import EmptySchema, UserSchema
from .auth import auth_error_responses, token_auth

bp = Blueprint('users', __name__)

@@ -60,7 +60,7 @@ def delete_user(user_id):
user = User.query.get(user_id)
if user is None:
abort(404)
if not (user == current_user or current_user.is_administrator):
if not (user == current_user or current_user.is_administrator()):
abort(403)
user.delete()
db.session.commit()

@@ -78,7 +78,7 @@ def get_user(user_id):
user = User.query.get(user_id)
if user is None:
abort(404)
if not (user == current_user or current_user.is_administrator):
if not (user == current_user or current_user.is_administrator()):
abort(403)
return user

@@ -94,6 +94,6 @@ def get_user_by_username(username):
user = User.query.filter(User.username == username).first()
if user is None:
abort(404)
if not (user == current_user or current_user.is_administrator):
if not (user == current_user or current_user.is_administrator()):
abort(403)
return user
@@ -1,4 +1,5 @@
from flask import abort, flash, redirect, render_template, request, url_for
from flask_breadcrumbs import register_breadcrumb
from flask_login import current_user, login_user, login_required, logout_user
from app import db
from app.email import create_message, send

@@ -18,9 +19,7 @@ def before_request():
Checks if a user is unconfirmed when visiting specific sites. Redirects to
unconfirmed view if user is unconfirmed.
"""
if not current_user.is_authenticated:
return

if current_user.is_authenticated:
current_user.ping()
db.session.commit()
if (not current_user.confirmed

@@ -28,11 +27,10 @@ def before_request():
and request.blueprint != 'auth'
and request.endpoint != 'static'):
return redirect(url_for('auth.unconfirmed'))
if not current_user.terms_of_use_accepted:
return redirect(url_for('main.terms_of_use'))

@bp.route('/register', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.register', 'Register')
def register():
if current_user.is_authenticated:
return redirect(url_for('main.dashboard'))

@@ -69,6 +67,7 @@ def register():

@bp.route('/login', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.login', 'Login')
def login():
if current_user.is_authenticated:
return redirect(url_for('main.dashboard'))

@@ -99,6 +98,7 @@ def logout():

@bp.route('/unconfirmed')
@register_breadcrumb(bp, '.unconfirmed', 'Unconfirmed')
@login_required
def unconfirmed():
if current_user.confirmed:

@@ -141,6 +141,7 @@ def confirm(token):

@bp.route('/reset-password-request', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.reset_password_request', 'Password Reset')
def reset_password_request():
if current_user.is_authenticated:
return redirect(url_for('main.dashboard'))

@@ -170,6 +171,7 @@ def reset_password_request():

@bp.route('/reset-password/<token>', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.reset_password', 'Password Reset')
def reset_password(token):
if current_user.is_authenticated:
return redirect(url_for('main.dashboard'))
@@ -1,18 +0,0 @@
from flask import Blueprint
from flask_login import login_required

bp = Blueprint('jobs', __name__)

@bp.before_request
@login_required
def before_request():
'''
Ensures that the routes in this package can only be visited by users that
are logged in.
'''
pass

from . import routes, json_routes
@@ -1,7 +1,9 @@
from flask import redirect, url_for
from flask_breadcrumbs import register_breadcrumb
from . import bp

@bp.route('')
@register_breadcrumb(bp, '.', '<i class="material-icons left">new_label</i>My Contributions')
def contributions():
return redirect(url_for('main.dashboard', _anchor='contributions'))
@@ -1,7 +1,7 @@
from flask_wtf.file import FileField, FileRequired
from wtforms import StringField, ValidationError
from wtforms.validators import InputRequired, Length
from app.blueprints.services import SERVICES
from app.services import SERVICES
from ..forms import ContributionBaseForm, UpdateContributionBaseForm
@@ -4,7 +4,7 @@ from threading import Thread
from app import db
from app.decorators import content_negotiation, permission_required
from app.models import SpaCyNLPPipelineModel
from . import bp
from .. import bp

@bp.route('/spacy-nlp-pipeline-models/<hashid:spacy_nlp_pipeline_model_id>', methods=['DELETE'])

@@ -17,7 +17,7 @@ def delete_spacy_model(spacy_nlp_pipeline_model_id):
db.session.commit()

snpm = SpaCyNLPPipelineModel.query.get_or_404(spacy_nlp_pipeline_model_id)
if not (snpm.user == current_user or current_user.is_administrator):
if not (snpm.user == current_user or current_user.is_administrator()):
abort(403)
thread = Thread(
target=_delete_spacy_model,

@@ -39,7 +39,7 @@ def update_spacy_nlp_pipeline_model_is_public(spacy_nlp_pipeline_model_id):
if not isinstance(is_public, bool):
abort(400)
snpm = SpaCyNLPPipelineModel.query.get_or_404(spacy_nlp_pipeline_model_id)
if not (snpm.user == current_user or current_user.is_administrator):
if not (snpm.user == current_user or current_user.is_administrator()):
abort(403)
snpm.is_public = is_public
db.session.commit()
@@ -1,4 +1,5 @@
from flask import abort, flash, redirect, render_template, url_for
from flask_breadcrumbs import register_breadcrumb
from flask_login import current_user
from app import db
from app.models import SpaCyNLPPipelineModel

@@ -7,9 +8,13 @@ from .forms import (
CreateSpaCyNLPPipelineModelForm,
UpdateSpaCyNLPPipelineModelForm
)
from .utils import (
spacy_nlp_pipeline_model_dlc as spacy_nlp_pipeline_model_dlc
)

@bp.route('/spacy-nlp-pipeline-models')
@register_breadcrumb(bp, '.spacy_nlp_pipeline_models', 'SpaCy NLP Pipeline Models')
def spacy_nlp_pipeline_models():
return render_template(
'contributions/spacy_nlp_pipeline_models/spacy_nlp_pipeline_models.html.j2',

@@ -18,6 +23,7 @@ def spacy_nlp_pipeline_models():

@bp.route('/spacy-nlp-pipeline-models/create', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.spacy_nlp_pipeline_models.create', 'Create')
def create_spacy_nlp_pipeline_model():
form = CreateSpaCyNLPPipelineModelForm()
if form.is_submitted():

@@ -51,9 +57,10 @@ def create_spacy_nlp_pipeline_model():

@bp.route('/spacy-nlp-pipeline-models/<hashid:spacy_nlp_pipeline_model_id>', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.spacy_nlp_pipeline_models.entity', '', dynamic_list_constructor=spacy_nlp_pipeline_model_dlc)
def spacy_nlp_pipeline_model(spacy_nlp_pipeline_model_id):
snpm = SpaCyNLPPipelineModel.query.get_or_404(spacy_nlp_pipeline_model_id)
if not (snpm.user == current_user or current_user.is_administrator):
if not (snpm.user == current_user or current_user.is_administrator()):
abort(403)
form = UpdateSpaCyNLPPipelineModelForm(data=snpm.to_json_serializeable())
if form.validate_on_submit():
app/contributions/spacy_nlp_pipeline_models/utils.py (new file, 13 lines)
@@ -0,0 +1,13 @@
from flask import request, url_for
from app.models import SpaCyNLPPipelineModel

def spacy_nlp_pipeline_model_dlc():
snpm_id = request.view_args['spacy_nlp_pipeline_model_id']
snpm = SpaCyNLPPipelineModel.query.get_or_404(snpm_id)
return [
{
'text': f'{snpm.title} {snpm.version}',
'url': url_for('.spacy_nlp_pipeline_model', spacy_nlp_pipeline_model_id=snpm_id)
}
]
@@ -1,6 +1,6 @@
from flask_wtf.file import FileField, FileRequired
from wtforms import ValidationError
from app.blueprints.services import SERVICES
from app.services import SERVICES
from ..forms import ContributionBaseForm, UpdateContributionBaseForm
@@ -17,7 +17,7 @@ def delete_tesseract_model(tesseract_ocr_pipeline_model_id):
db.session.commit()

topm = TesseractOCRPipelineModel.query.get_or_404(tesseract_ocr_pipeline_model_id)
if not (topm.user == current_user or current_user.is_administrator):
if not (topm.user == current_user or current_user.is_administrator()):
abort(403)
thread = Thread(
target=_delete_tesseract_ocr_pipeline_model,

@@ -39,7 +39,7 @@ def update_tesseract_ocr_pipeline_model_is_public(tesseract_ocr_pipeline_model_i
if not isinstance(is_public, bool):
abort(400)
topm = TesseractOCRPipelineModel.query.get_or_404(tesseract_ocr_pipeline_model_id)
if not (topm.user == current_user or current_user.is_administrator):
if not (topm.user == current_user or current_user.is_administrator()):
abort(403)
topm.is_public = is_public
db.session.commit()
@@ -1,4 +1,5 @@
from flask import abort, flash, redirect, render_template, url_for
from flask_breadcrumbs import register_breadcrumb
from flask_login import current_user
from app import db
from app.models import TesseractOCRPipelineModel

@@ -7,9 +8,13 @@ from .forms import (
CreateTesseractOCRPipelineModelForm,
UpdateTesseractOCRPipelineModelForm
)
from .utils import (
tesseract_ocr_pipeline_model_dlc as tesseract_ocr_pipeline_model_dlc
)

@bp.route('/tesseract-ocr-pipeline-models')
@register_breadcrumb(bp, '.tesseract_ocr_pipeline_models', 'Tesseract OCR Pipeline Models')
def tesseract_ocr_pipeline_models():
return render_template(
'contributions/tesseract_ocr_pipeline_models/tesseract_ocr_pipeline_models.html.j2',

@@ -18,6 +23,7 @@ def tesseract_ocr_pipeline_models():

@bp.route('/tesseract-ocr-pipeline-models/create', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.tesseract_ocr_pipeline_models.create', 'Create')
def create_tesseract_ocr_pipeline_model():
form = CreateTesseractOCRPipelineModelForm()
if form.is_submitted():

@@ -50,9 +56,10 @@ def create_tesseract_ocr_pipeline_model():

@bp.route('/tesseract-ocr-pipeline-models/<hashid:tesseract_ocr_pipeline_model_id>', methods=['GET', 'POST'])
@register_breadcrumb(bp, '.tesseract_ocr_pipeline_models.entity', '', dynamic_list_constructor=tesseract_ocr_pipeline_model_dlc)
def tesseract_ocr_pipeline_model(tesseract_ocr_pipeline_model_id):
topm = TesseractOCRPipelineModel.query.get_or_404(tesseract_ocr_pipeline_model_id)
if not (topm.user == current_user or current_user.is_administrator):
if not (topm.user == current_user or current_user.is_administrator()):
abort(403)
form = UpdateTesseractOCRPipelineModelForm(data=topm.to_json_serializeable())
if form.validate_on_submit():
app/contributions/tesseract_ocr_pipeline_models/utils.py (new file, 13 lines)
@@ -0,0 +1,13 @@
from flask import request, url_for
from app.models import TesseractOCRPipelineModel

def tesseract_ocr_pipeline_model_dlc():
topm_id = request.view_args['tesseract_ocr_pipeline_model_id']
topm = TesseractOCRPipelineModel.query.get_or_404(topm_id)
return [
{
'text': f'{topm.title} {topm.version}',
'url': url_for('.tesseract_ocr_pipeline_model', tesseract_ocr_pipeline_model_id=topm_id)
}
]
|
@ -1,10 +1,11 @@
|
||||
from datetime import datetime
|
||||
from flask import current_app
|
||||
from pathlib import Path
|
||||
import json
|
||||
import shutil
|
||||
from app import db
|
||||
from app.models import User, Corpus, CorpusFile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, List
|
||||
import json
|
||||
import shutil
|
||||
|
||||
|
||||
class SandpaperConverter:
|
||||
@ -14,7 +15,7 @@ class SandpaperConverter:
|
||||
|
||||
def run(self):
|
||||
with self.json_db_file.open('r') as f:
|
||||
json_db: list[dict] = json.load(f)
|
||||
json_db: List[Dict] = json.load(f)
|
||||
|
||||
for json_user in json_db:
|
||||
if not json_user['confirmed']:
|
||||
@ -25,7 +26,7 @@ class SandpaperConverter:
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def convert_user(self, json_user: dict, user_dir: Path):
|
||||
def convert_user(self, json_user: Dict, user_dir: Path):
|
||||
current_app.logger.info(f'Create User {json_user["username"]}...')
|
||||
try:
|
||||
user = User.create(
|
||||
@ -47,7 +48,7 @@ class SandpaperConverter:
|
||||
current_app.logger.info('Done')
|
||||
|
||||
|
||||
def convert_corpus(self, json_corpus: dict, user: User, corpus_dir: Path):
|
||||
def convert_corpus(self, json_corpus: Dict, user: User, corpus_dir: Path):
|
||||
current_app.logger.info(f'Create Corpus {json_corpus["title"]}...')
|
||||
try:
|
||||
corpus = Corpus.create(
|
||||
@ -63,7 +64,7 @@ class SandpaperConverter:
|
||||
current_app.logger.info('Done')
|
||||
|
||||
|
||||
def convert_corpus_file(self, json_corpus_file: dict, corpus: Corpus, corpus_dir: Path):
|
||||
def convert_corpus_file(self, json_corpus_file: Dict, corpus: Corpus, corpus_dir: Path):
|
||||
current_app.logger.info(f'Create CorpusFile {json_corpus_file["title"]}...')
|
||||
corpus_file = CorpusFile(
|
||||
corpus=corpus,
|
||||
|
@@ -1,16 +1,17 @@
from cqi import CQiClient
from cqi.errors import CQiException
from cqi.status import CQiStatus
from flask import current_app
from docker.models.containers import Container
from flask import current_app, session
from flask_login import current_user
from flask_socketio import Namespace
from inspect import signature
from threading import Lock
from typing import Callable, Dict, List, Optional
from app import db, docker_client, hashids, socketio
from app.decorators import socketio_login_required
from app.models import Corpus, CorpusStatus
from . import extensions
from .utils import CQiOverSocketIOSessionManager

'''

@@ -18,7 +19,7 @@ This package tunnels the Corpus Query interface (CQi) protocol through
Socket.IO (SIO) by tunneling CQi API calls through an event called "exec".

Basic concept:
1. A client connects to the namespace.
1. A client connects to the "/cqi_over_sio" namespace.
2. The client emits the "init" event and provides a corpus id for the corpus
   that should be analysed in this session.
1.1 The analysis session counter of the corpus is incremented.

@@ -27,17 +28,17 @@ Basic concept:
1.4 Connect the CQiClient to the server.
1.5 Save the CQiClient, the Lock and the corpus id in the session for
    subsequential use.
3. The client emits "exec" events, within which it provides the name of a CQi
   API function and the corresponding arguments.
3.1 The "exec" event handler will execute the function, make sure that
    the result is serializable and returns the result back to the client.
4. The client disconnects from the namespace
4.1 The analysis session counter of the corpus is decremented.
4.2 The CQiClient and (Mutex) Lock belonging to it are teared down.
2. The client emits the "exec" event provides the name of a CQi API function
   arguments (optional).
- The event "exec" handler will execute the function, make sure that the
  result is serializable and returns the result back to the client.
4. Wait for more events
5. The client disconnects from the "/cqi_over_sio" namespace
1.1 The analysis session counter of the corpus is decremented.
1.2 The CQiClient and (Mutex) Lock belonging to it are teared down.
'''
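The docstring above describes the tunnelling protocol from the server side. As a hedged illustration only (not code from this repository; the server URL, the corpus hashid and the cookie-based login are assumptions), a python-socketio client could drive the namespace roughly like this:

import socketio  # python-socketio client

sio = socketio.Client()
# The namespace is guarded by @socketio_login_required, so the connection must
# carry an authenticated nopaque session cookie (assumed to be set up here).
sio.connect('https://nopaque.example.org', namespaces=['/cqi_over_sio'])

# Step 2 of the docstring: announce which corpus should be analysed.
response = sio.call('init', 'SOME_CORPUS_HASHID', namespace='/cqi_over_sio')
assert response['code'] == 200

# Step 3: tunnel a CQi API call; the tuple is unpacked into (fn_name, fn_args).
response = sio.call(
    'exec',
    ('ask_feature_cqi_1_0', {}),
    namespace='/cqi_over_sio'
)
print(response)  # e.g. {'code': 200, 'msg': 'OK', 'payload': ...}

sio.disconnect()  # final step: the server tears down the CQi session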
CQI_API_FUNCTION_NAMES = [
CQI_API_FUNCTION_NAMES: List[str] = [
'ask_feature_cl_2_3',
'ask_feature_cqi_1_0',
'ask_feature_cqp_2_3',

@@ -85,80 +86,68 @@
]

class CQiOverSocketIONamespace(Namespace):
class CQiNamespace(Namespace):
@socketio_login_required
def on_connect(self):
pass

@socketio_login_required
def on_init(self, corpus_hashid: str) -> dict:
corpus_id = hashids.decode(corpus_hashid)

if not isinstance(corpus_id, int):
return {'code': 400, 'msg': 'Bad Request'}

corpus = Corpus.query.get(corpus_id)

if corpus is None:
def on_init(self, db_corpus_hashid: str):
db_corpus_id: int = hashids.decode(db_corpus_hashid)
db_corpus: Optional[Corpus] = Corpus.query.get(db_corpus_id)
if db_corpus is None:
return {'code': 404, 'msg': 'Not Found'}

if not (
corpus.user == current_user
or current_user.is_following_corpus(corpus)
or current_user.is_administrator
):
if not (db_corpus.user == current_user
or current_user.is_following_corpus(db_corpus)
or current_user.is_administrator()):
return {'code': 403, 'msg': 'Forbidden'}

if corpus.status not in [
if db_corpus.status not in [
CorpusStatus.BUILT,
CorpusStatus.STARTING_ANALYSIS_SESSION,
CorpusStatus.RUNNING_ANALYSIS_SESSION,
CorpusStatus.CANCELING_ANALYSIS_SESSION
]:
return {'code': 424, 'msg': 'Failed Dependency'}

corpus.num_analysis_sessions = Corpus.num_analysis_sessions + 1
if db_corpus.num_analysis_sessions is None:
db_corpus.num_analysis_sessions = 0
db.session.commit()
retry_counter = 20
while corpus.status != CorpusStatus.RUNNING_ANALYSIS_SESSION:
db_corpus.num_analysis_sessions = Corpus.num_analysis_sessions + 1
db.session.commit()
retry_counter: int = 20
while db_corpus.status != CorpusStatus.RUNNING_ANALYSIS_SESSION:
if retry_counter == 0:
corpus.num_analysis_sessions = Corpus.num_analysis_sessions - 1
db_corpus.num_analysis_sessions = Corpus.num_analysis_sessions - 1
db.session.commit()
return {'code': 408, 'msg': 'Request Timeout'}
socketio.sleep(3)
retry_counter -= 1
db.session.refresh(corpus)

cqpserver_container_name = f'nopaque-cqpserver-{corpus_id}'
cqpserver_container = docker_client.containers.get(cqpserver_container_name)
cqpserver_ip_address = cqpserver_container.attrs['NetworkSettings']['Networks'][current_app.config['NOPAQUE_DOCKER_NETWORK_NAME']]['IPAddress']
cqi_client = CQiClient(cqpserver_ip_address)
cqi_client_lock = Lock()

CQiOverSocketIOSessionManager.setup()
CQiOverSocketIOSessionManager.set_corpus_id(corpus_id)
CQiOverSocketIOSessionManager.set_cqi_client(cqi_client)
CQiOverSocketIOSessionManager.set_cqi_client_lock(cqi_client_lock)

db.session.refresh(db_corpus)
# cqi_client: CQiClient = CQiClient(f'cqpserver_{db_corpus_id}')
cqpserver_container_name: str = f'cqpserver_{db_corpus_id}'
cqpserver_container: Container = docker_client.containers.get(cqpserver_container_name)
cqpserver_host: str = cqpserver_container.attrs['NetworkSettings']['Networks'][current_app.config['NOPAQUE_DOCKER_NETWORK_NAME']]['IPAddress']
cqi_client: CQiClient = CQiClient(cqpserver_host)
session['cqi_over_sio'] = {
'cqi_client': cqi_client,
'cqi_client_lock': Lock(),
'db_corpus_id': db_corpus_id
}
return {'code': 200, 'msg': 'OK'}

@socketio_login_required
def on_exec(self, fn_name: str, fn_args: dict = {}) -> dict:
def on_exec(self, fn_name: str, fn_args: Dict = {}):
try:
cqi_client = CQiOverSocketIOSessionManager.get_cqi_client()
cqi_client_lock = CQiOverSocketIOSessionManager.get_cqi_client_lock()
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
cqi_client_lock: Lock = session['cqi_over_sio']['cqi_client_lock']
except KeyError:
return {'code': 424, 'msg': 'Failed Dependency'}

if fn_name in CQI_API_FUNCTION_NAMES:
fn = getattr(cqi_client.api, fn_name)
fn: Callable = getattr(cqi_client.api, fn_name)
elif fn_name in extensions.CQI_EXTENSION_FUNCTION_NAMES:
fn = getattr(extensions, fn_name)
fn: Callable = getattr(extensions, fn_name)
else:
return {'code': 400, 'msg': 'Bad Request'}

for param in signature(fn).parameters.values():
# Check if the parameter is optional or required
if param.default is param.empty:
if param.name not in fn_args:
return {'code': 400, 'msg': 'Bad Request'}

@@ -167,7 +156,6 @@ class CQiOverSocketIONamespace(Namespace):
continue
if type(fn_args[param.name]) is not param.annotation:
return {'code': 400, 'msg': 'Bad Request'}

cqi_client_lock.acquire()
try:
fn_return_value = fn(**fn_args)

@@ -185,7 +173,6 @@ class CQiOverSocketIONamespace(Namespace):
}
finally:
cqi_client_lock.release()

if isinstance(fn_return_value, CQiStatus):
payload = {
'code': fn_return_value.code,

@@ -193,31 +180,27 @@ class CQiOverSocketIONamespace(Namespace):
}
else:
payload = fn_return_value

return {'code': 200, 'msg': 'OK', 'payload': payload}

def on_disconnect(self):
try:
corpus_id = CQiOverSocketIOSessionManager.get_corpus_id()
cqi_client = CQiOverSocketIOSessionManager.get_cqi_client()
cqi_client_lock = CQiOverSocketIOSessionManager.get_cqi_client_lock()
CQiOverSocketIOSessionManager.teardown()
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
cqi_client_lock: Lock = session['cqi_over_sio']['cqi_client_lock']
db_corpus_id: int = session['cqi_over_sio']['db_corpus_id']
except KeyError:
return

cqi_client_lock.acquire()

try:
session.pop('cqi_over_sio')
except KeyError:
pass
try:
cqi_client.api.ctrl_bye()
except (BrokenPipeError, CQiException):
pass

cqi_client_lock.release()

corpus = Corpus.query.get(corpus_id)

if corpus is None:
db_corpus: Optional[Corpus] = Corpus.query.get(db_corpus_id)
if db_corpus is None:
return

corpus.num_analysis_sessions = Corpus.num_analysis_sessions - 1
db_corpus.num_analysis_sessions = Corpus.num_analysis_sessions - 1
db.session.commit()
@ -1,17 +1,23 @@
|
||||
from collections import Counter
|
||||
from cqi import CQiClient
|
||||
from cqi.models.corpora import Corpus as CQiCorpus
|
||||
from cqi.models.subcorpora import Subcorpus as CQiSubcorpus
|
||||
from cqi.models.attributes import (
|
||||
PositionalAttribute as CQiPositionalAttribute,
|
||||
StructuralAttribute as CQiStructuralAttribute
|
||||
)
|
||||
from cqi.status import StatusOk as CQiStatusOk
|
||||
from flask import current_app
|
||||
from flask import session
|
||||
from typing import Dict, List
|
||||
import gzip
|
||||
import json
|
||||
import math
|
||||
from app import db
|
||||
from app.models import Corpus
|
||||
from .utils import CQiOverSocketIOSessionManager
|
||||
from .utils import lookups_by_cpos, partial_export_subcorpus, export_subcorpus
|
||||
|
||||
|
||||
CQI_EXTENSION_FUNCTION_NAMES = [
|
||||
CQI_EXTENSION_FUNCTION_NAMES: List[str] = [
|
||||
'ext_corpus_update_db',
|
||||
'ext_corpus_static_data',
|
||||
'ext_corpus_paginate_corpus',
|
||||
@ -22,28 +28,28 @@ CQI_EXTENSION_FUNCTION_NAMES = [
|
||||
|
||||
|
||||
def ext_corpus_update_db(corpus: str) -> CQiStatusOk:
|
||||
corpus_id = CQiOverSocketIOSessionManager.get_corpus_id()
|
||||
cqi_client = CQiOverSocketIOSessionManager.get_cqi_client()
|
||||
db_corpus = Corpus.query.get(corpus_id)
|
||||
cqi_corpus = cqi_client.corpora.get(corpus)
|
||||
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
|
||||
db_corpus_id: int = session['cqi_over_sio']['db_corpus_id']
|
||||
db_corpus: Corpus = Corpus.query.get(db_corpus_id)
|
||||
cqi_corpus: CQiCorpus = cqi_client.corpora.get(corpus)
|
||||
db_corpus.num_tokens = cqi_corpus.size
|
||||
db.session.commit()
|
||||
return CQiStatusOk()
|
||||
|
||||
|
||||
def ext_corpus_static_data(corpus: str) -> dict:
|
||||
corpus_id = CQiOverSocketIOSessionManager.get_corpus_id()
|
||||
db_corpus = Corpus.query.get(corpus_id)
|
||||
def ext_corpus_static_data(corpus: str) -> Dict:
|
||||
db_corpus_id: int = session['cqi_over_sio']['db_corpus_id']
|
||||
db_corpus: Corpus = Corpus.query.get(db_corpus_id)
|
||||
|
||||
static_data_file_path = db_corpus.path / 'cwb' / 'static.json.gz'
|
||||
if static_data_file_path.exists():
|
||||
with static_data_file_path.open('rb') as f:
|
||||
return f.read()
|
||||
|
||||
cqi_client = CQiOverSocketIOSessionManager.get_cqi_client()
|
||||
cqi_corpus = cqi_client.corpora.get(corpus)
|
||||
cqi_p_attrs = cqi_corpus.positional_attributes.list()
|
||||
cqi_s_attrs = cqi_corpus.structural_attributes.list()
|
||||
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
|
||||
cqi_corpus: CQiCorpus = cqi_client.corpora.get(corpus)
|
||||
cqi_p_attrs: List[CQiPositionalAttribute] = cqi_corpus.positional_attributes.list()
|
||||
cqi_s_attrs: List[CQiStructuralAttribute] = cqi_corpus.structural_attributes.list()
|
||||
|
||||
static_data = {
|
||||
'corpus': {
|
||||
@ -56,21 +62,21 @@ def ext_corpus_static_data(corpus: str) -> dict:
|
||||
}
|
||||
|
||||
for p_attr in cqi_p_attrs:
|
||||
current_app.logger.info(f'corpus.freqs.{p_attr.name}')
|
||||
print(f'corpus.freqs.{p_attr.name}')
|
||||
static_data['corpus']['freqs'][p_attr.name] = []
|
||||
p_attr_id_list = list(range(p_attr.lexicon_size))
|
||||
p_attr_id_list: List[int] = list(range(p_attr.lexicon_size))
|
||||
static_data['corpus']['freqs'][p_attr.name].extend(p_attr.freqs_by_ids(p_attr_id_list))
|
||||
del p_attr_id_list
|
||||
|
||||
current_app.logger.info(f'p_attrs.{p_attr.name}')
|
||||
print(f'p_attrs.{p_attr.name}')
|
||||
static_data['p_attrs'][p_attr.name] = []
|
||||
cpos_list = list(range(cqi_corpus.size))
|
||||
cpos_list: List[int] = list(range(cqi_corpus.size))
|
||||
static_data['p_attrs'][p_attr.name].extend(p_attr.ids_by_cpos(cpos_list))
|
||||
del cpos_list
|
||||
|
||||
current_app.logger.info(f'values.p_attrs.{p_attr.name}')
|
||||
print(f'values.p_attrs.{p_attr.name}')
|
||||
static_data['values']['p_attrs'][p_attr.name] = []
|
||||
p_attr_id_list = list(range(p_attr.lexicon_size))
|
||||
p_attr_id_list: List[int] = list(range(p_attr.lexicon_size))
|
||||
static_data['values']['p_attrs'][p_attr.name].extend(p_attr.values_by_ids(p_attr_id_list))
|
||||
del p_attr_id_list
|
||||
|
||||
@ -86,9 +92,9 @@ def ext_corpus_static_data(corpus: str) -> dict:
|
||||
# Note: Needs more testing, don't use it in production #
|
||||
##############################################################
|
||||
cqi_corpus.query('Last', f'<{s_attr.name}> []* </{s_attr.name}>;')
|
||||
cqi_subcorpus = cqi_corpus.subcorpora.get('Last')
|
||||
first_match = 0
|
||||
last_match = cqi_subcorpus.size - 1
|
||||
cqi_subcorpus: CQiSubcorpus = cqi_corpus.subcorpora.get('Last')
|
||||
first_match: int = 0
|
||||
last_match: int = cqi_subcorpus.size - 1
|
||||
match_boundaries = zip(
|
||||
range(first_match, last_match + 1),
|
||||
cqi_subcorpus.dump(
|
||||
@ -106,7 +112,7 @@ def ext_corpus_static_data(corpus: str) -> dict:
|
||||
del cqi_subcorpus, first_match, last_match
|
||||
for id, lbound, rbound in match_boundaries:
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'].append({})
|
||||
current_app.logger.info(f's_attrs.{s_attr.name}.lexicon.{id}.bounds')
|
||||
print(f's_attrs.{s_attr.name}.lexicon.{id}.bounds')
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'][id]['bounds'] = [lbound, rbound]
|
||||
del match_boundaries
|
||||
|
||||
@ -118,33 +124,33 @@ def ext_corpus_static_data(corpus: str) -> dict:
|
||||
# This is a very slow operation, thats why we only use it for
|
||||
# the text attribute
|
||||
lbound, rbound = s_attr.cpos_by_id(id)
|
||||
current_app.logger.info(f's_attrs.{s_attr.name}.lexicon.{id}.bounds')
|
||||
print(f's_attrs.{s_attr.name}.lexicon.{id}.bounds')
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'][id]['bounds'] = [lbound, rbound]
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'][id]['freqs'] = {}
|
||||
cpos_list = list(range(lbound, rbound + 1))
|
||||
cpos_list: List[int] = list(range(lbound, rbound + 1))
|
||||
for p_attr in cqi_p_attrs:
|
||||
p_attr_ids = []
|
||||
p_attr_ids: List[int] = []
|
||||
p_attr_ids.extend(p_attr.ids_by_cpos(cpos_list))
|
||||
current_app.logger.info(f's_attrs.{s_attr.name}.lexicon.{id}.freqs.{p_attr.name}')
|
||||
print(f's_attrs.{s_attr.name}.lexicon.{id}.freqs.{p_attr.name}')
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'][id]['freqs'][p_attr.name] = dict(Counter(p_attr_ids))
|
||||
del p_attr_ids
|
||||
del cpos_list
|
||||
|
||||
sub_s_attrs = cqi_corpus.structural_attributes.list(filters={'part_of': s_attr})
|
||||
current_app.logger.info(f's_attrs.{s_attr.name}.values')
|
||||
sub_s_attrs: List[CQiStructuralAttribute] = cqi_corpus.structural_attributes.list(filters={'part_of': s_attr})
|
||||
print(f's_attrs.{s_attr.name}.values')
|
||||
static_data['s_attrs'][s_attr.name]['values'] = [
|
||||
sub_s_attr.name[(len(s_attr.name) + 1):]
|
||||
for sub_s_attr in sub_s_attrs
|
||||
]
|
||||
s_attr_id_list = list(range(s_attr.size))
|
||||
sub_s_attr_values = []
|
||||
s_attr_id_list: List[int] = list(range(s_attr.size))
|
||||
sub_s_attr_values: List[str] = []
|
||||
for sub_s_attr in sub_s_attrs:
|
||||
tmp = []
|
||||
tmp.extend(sub_s_attr.values_by_ids(s_attr_id_list))
|
||||
sub_s_attr_values.append(tmp)
|
||||
del tmp
|
||||
del s_attr_id_list
|
||||
current_app.logger.info(f'values.s_attrs.{s_attr.name}')
|
||||
print(f'values.s_attrs.{s_attr.name}')
|
||||
static_data['values']['s_attrs'][s_attr.name] = [
|
||||
{
|
||||
s_attr_value_name: sub_s_attr_values[s_attr_value_name_idx][s_attr_id]
|
||||
@ -154,11 +160,11 @@ def ext_corpus_static_data(corpus: str) -> dict:
|
||||
} for s_attr_id in range(0, s_attr.size)
|
||||
]
|
||||
del sub_s_attr_values
|
||||
current_app.logger.info('Saving static data to file')
|
||||
print('Saving static data to file')
|
||||
with gzip.open(static_data_file_path, 'wt') as f:
|
||||
json.dump(static_data, f)
|
||||
del static_data
|
||||
current_app.logger.info('Sending static data to client')
|
||||
print('Sending static data to client')
|
||||
with open(static_data_file_path, 'rb') as f:
|
||||
return f.read()
|
||||
|
||||
@ -167,8 +173,8 @@ def ext_corpus_paginate_corpus(
|
||||
corpus: str,
|
||||
page: int = 1,
|
||||
per_page: int = 20
|
||||
) -> dict:
|
||||
cqi_client = CQiOverSocketIOSessionManager.get_cqi_client()
|
||||
) -> Dict:
|
||||
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
|
||||
cqi_corpus = cqi_client.corpora.get(corpus)
|
||||
# Sanity checks
|
||||
if (
|
||||
@ -183,7 +189,7 @@ def ext_corpus_paginate_corpus(
|
||||
first_cpos = (page - 1) * per_page
|
||||
last_cpos = min(cqi_corpus.size, first_cpos + per_page)
|
||||
cpos_list = [*range(first_cpos, last_cpos)]
|
||||
lookups = _lookups_by_cpos(cqi_corpus, cpos_list)
|
||||
lookups = lookups_by_cpos(cqi_corpus, cpos_list)
|
||||
payload = {}
|
||||
# the items for the current page
|
||||
payload['items'] = [cpos_list]
|
||||
@ -213,9 +219,9 @@ def ext_cqp_paginate_subcorpus(
|
||||
context: int = 50,
|
||||
page: int = 1,
|
||||
per_page: int = 20
|
||||
) -> dict:
|
||||
) -> Dict:
|
||||
corpus_name, subcorpus_name = subcorpus.split(':', 1)
|
||||
cqi_client = CQiOverSocketIOSessionManager.get_cqi_client()
|
||||
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
|
||||
cqi_corpus = cqi_client.corpora.get(corpus_name)
|
||||
cqi_subcorpus = cqi_corpus.subcorpora.get(subcorpus_name)
|
||||
# Sanity checks
|
||||
@ -230,7 +236,7 @@ def ext_cqp_paginate_subcorpus(
|
||||
return {'code': 416, 'msg': 'Range Not Satisfiable'}
|
||||
offset = (page - 1) * per_page
|
||||
cutoff = per_page
|
||||
cqi_results_export = _export_subcorpus(
|
||||
cqi_results_export = export_subcorpus(
|
||||
cqi_subcorpus, context=context, cutoff=cutoff, offset=offset)
|
||||
payload = {}
|
||||
# the items for the current page
|
||||
@ -260,147 +266,22 @@ def ext_cqp_partial_export_subcorpus(
|
||||
subcorpus: str,
|
||||
match_id_list: list,
|
||||
context: int = 50
|
||||
) -> dict:
|
||||
) -> Dict:
|
||||
corpus_name, subcorpus_name = subcorpus.split(':', 1)
|
||||
cqi_client = CQiOverSocketIOSessionManager.get_cqi_client()
|
||||
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
|
||||
cqi_corpus = cqi_client.corpora.get(corpus_name)
|
||||
cqi_subcorpus = cqi_corpus.subcorpora.get(subcorpus_name)
|
||||
cqi_subcorpus_partial_export = _partial_export_subcorpus(cqi_subcorpus, match_id_list, context=context)
|
||||
cqi_subcorpus_partial_export = partial_export_subcorpus(cqi_subcorpus, match_id_list, context=context)
|
||||
return cqi_subcorpus_partial_export
|
||||
|
||||
|
||||
def ext_cqp_export_subcorpus(subcorpus: str, context: int = 50) -> dict:
|
||||
def ext_cqp_export_subcorpus(
|
||||
subcorpus: str,
|
||||
context: int = 50
|
||||
) -> Dict:
|
||||
corpus_name, subcorpus_name = subcorpus.split(':', 1)
|
||||
cqi_client = CQiOverSocketIOSessionManager.get_cqi_client()
|
||||
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
|
||||
cqi_corpus = cqi_client.corpora.get(corpus_name)
|
||||
cqi_subcorpus = cqi_corpus.subcorpora.get(subcorpus_name)
|
||||
cqi_subcorpus_export = _export_subcorpus(cqi_subcorpus, context=context)
|
||||
cqi_subcorpus_export = export_subcorpus(cqi_subcorpus, context=context)
|
||||
return cqi_subcorpus_export
|
||||
|
||||
|
||||
def _lookups_by_cpos(corpus: CQiCorpus, cpos_list: list[int]) -> dict:
|
||||
lookups = {}
|
||||
lookups['cpos_lookup'] = {cpos: {} for cpos in cpos_list}
|
||||
for attr in corpus.positional_attributes.list():
|
||||
cpos_attr_values = attr.values_by_cpos(cpos_list)
|
||||
for i, cpos in enumerate(cpos_list):
|
||||
lookups['cpos_lookup'][cpos][attr.name] = cpos_attr_values[i]
|
||||
for attr in corpus.structural_attributes.list():
|
||||
# We only want to iterate over non subattributes, identifiable by
|
||||
# attr.has_values == False
|
||||
if attr.has_values:
|
||||
continue
|
||||
cpos_attr_ids = attr.ids_by_cpos(cpos_list)
|
||||
for i, cpos in enumerate(cpos_list):
|
||||
if cpos_attr_ids[i] == -1:
|
||||
continue
|
||||
lookups['cpos_lookup'][cpos][attr.name] = cpos_attr_ids[i]
|
||||
occured_attr_ids = [x for x in set(cpos_attr_ids) if x != -1]
|
||||
if len(occured_attr_ids) == 0:
|
||||
continue
|
||||
subattrs = corpus.structural_attributes.list(filters={'part_of': attr})
|
||||
if len(subattrs) == 0:
|
||||
continue
|
||||
lookup_name = f'{attr.name}_lookup'
|
||||
lookups[lookup_name] = {}
|
||||
for attr_id in occured_attr_ids:
|
||||
lookups[lookup_name][attr_id] = {}
|
||||
for subattr in subattrs:
|
||||
subattr_name = subattr.name[(len(attr.name) + 1):] # noqa
|
||||
for i, subattr_value in enumerate(subattr.values_by_ids(occured_attr_ids)): # noqa
|
||||
lookups[lookup_name][occured_attr_ids[i]][subattr_name] = subattr_value # noqa
|
||||
return lookups
|
||||
|
||||
|
||||
def _partial_export_subcorpus(
|
||||
subcorpus: CQiSubcorpus,
|
||||
match_id_list: list[int],
|
||||
context: int = 25
|
||||
) -> dict:
|
||||
if subcorpus.size == 0:
|
||||
return {'matches': []}
|
||||
match_boundaries = []
|
||||
for match_id in match_id_list:
|
||||
if match_id < 0 or match_id >= subcorpus.size:
|
||||
continue
|
||||
match_boundaries.append(
|
||||
(
|
||||
match_id,
|
||||
subcorpus.dump(subcorpus.fields['match'], match_id, match_id)[0],
|
||||
subcorpus.dump(subcorpus.fields['matchend'], match_id, match_id)[0]
|
||||
)
|
||||
)
|
||||
cpos_set = set()
|
||||
matches = []
|
||||
for match_boundary in match_boundaries:
|
||||
match_num, match_start, match_end = match_boundary
|
||||
c = (match_start, match_end)
|
||||
if match_start == 0 or context == 0:
|
||||
lc = None
|
||||
cpos_list_lbound = match_start
|
||||
else:
|
||||
lc_lbound = max(0, (match_start - context))
|
||||
lc_rbound = match_start - 1
|
||||
lc = (lc_lbound, lc_rbound)
|
||||
cpos_list_lbound = lc_lbound
|
||||
if match_end == (subcorpus.collection.corpus.size - 1) or context == 0:
|
||||
rc = None
|
||||
cpos_list_rbound = match_end
|
||||
else:
|
||||
rc_lbound = match_end + 1
|
||||
rc_rbound = min(
|
||||
(match_end + context),
|
||||
(subcorpus.collection.corpus.size - 1)
|
||||
)
|
||||
rc = (rc_lbound, rc_rbound)
|
||||
cpos_list_rbound = rc_rbound
|
||||
match = {'num': match_num, 'lc': lc, 'c': c, 'rc': rc}
|
||||
matches.append(match)
|
||||
cpos_set.update(range(cpos_list_lbound, cpos_list_rbound + 1))
|
||||
lookups = _lookups_by_cpos(subcorpus.collection.corpus, list(cpos_set))
|
||||
return {'matches': matches, **lookups}
|
||||
|
||||
|
||||
def _export_subcorpus(
|
||||
subcorpus: CQiSubcorpus,
|
||||
context: int = 25,
|
||||
cutoff: float = float('inf'),
|
||||
offset: int = 0
|
||||
) -> dict:
|
||||
if subcorpus.size == 0:
|
||||
return {'matches': []}
|
||||
first_match = max(0, offset)
|
||||
last_match = min((offset + cutoff - 1), (subcorpus.size - 1))
|
||||
match_boundaries = zip(
|
||||
range(first_match, last_match + 1),
|
||||
subcorpus.dump(subcorpus.fields['match'], first_match, last_match),
|
||||
subcorpus.dump(subcorpus.fields['matchend'], first_match, last_match)
|
||||
)
|
||||
cpos_set = set()
|
||||
matches = []
|
||||
for match_num, match_start, match_end in match_boundaries:
|
||||
c = (match_start, match_end)
|
||||
if match_start == 0 or context == 0:
|
||||
lc = None
|
||||
cpos_list_lbound = match_start
|
||||
else:
|
||||
lc_lbound = max(0, (match_start - context))
|
||||
lc_rbound = match_start - 1
|
||||
lc = (lc_lbound, lc_rbound)
|
||||
cpos_list_lbound = lc_lbound
|
||||
if match_end == (subcorpus.collection.corpus.size - 1) or context == 0:
|
||||
rc = None
|
||||
cpos_list_rbound = match_end
|
||||
else:
|
||||
rc_lbound = match_end + 1
|
||||
rc_rbound = min(
|
||||
(match_end + context),
|
||||
(subcorpus.collection.corpus.size - 1)
|
||||
)
|
||||
rc = (rc_lbound, rc_rbound)
|
||||
cpos_list_rbound = rc_rbound
|
||||
match = {'num': match_num, 'lc': lc, 'c': c, 'rc': rc}
|
||||
matches.append(match)
|
||||
cpos_set.update(range(cpos_list_lbound, cpos_list_rbound + 1))
|
||||
lookups = _lookups_by_cpos(subcorpus.collection.corpus, list(cpos_set))
|
||||
return {'matches': matches, **lookups}
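The left/right context computation above clips the window at the corpus boundaries. A minimal worked example of the same arithmetic in plain Python, with made-up numbers and no CQi objects involved:

corpus_size = 1000        # stands in for subcorpus.collection.corpus.size
context = 25
match_start, match_end = 10, 12

# Left context: absent at the corpus start or when context == 0,
# otherwise up to `context` tokens, clipped at position 0.
lc = (None if match_start == 0 or context == 0
      else (max(0, match_start - context), match_start - 1))

# Right context: absent at the corpus end or when context == 0,
# otherwise up to `context` tokens, clipped at the last position.
rc = (None if match_end == corpus_size - 1 or context == 0
      else (match_end + 1, min(match_end + context, corpus_size - 1)))

print(lc)   # (0, 9)   -- only 10 tokens available on the left
print(rc)   # (13, 37) -- a full 25 tokens on the right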
|
app/corpora/cqi_over_sio/utils.py (new file, 131 lines)
@ -0,0 +1,131 @@
from cqi.models.corpora import Corpus as CQiCorpus
|
||||
from cqi.models.subcorpora import Subcorpus as CQiSubcorpus
|
||||
from typing import Dict, List
|
||||
|
||||
|
||||
def lookups_by_cpos(corpus: CQiCorpus, cpos_list: List[int]) -> Dict:
|
||||
lookups = {}
|
||||
lookups['cpos_lookup'] = {cpos: {} for cpos in cpos_list}
|
||||
for attr in corpus.positional_attributes.list():
|
||||
cpos_attr_values: List[str] = attr.values_by_cpos(cpos_list)
|
||||
for i, cpos in enumerate(cpos_list):
|
||||
lookups['cpos_lookup'][cpos][attr.name] = cpos_attr_values[i]
|
||||
for attr in corpus.structural_attributes.list():
|
||||
# We only want to iterate over non subattributes, identifiable by
|
||||
# attr.has_values == False
|
||||
if attr.has_values:
|
||||
continue
|
||||
cpos_attr_ids: List[int] = attr.ids_by_cpos(cpos_list)
|
||||
for i, cpos in enumerate(cpos_list):
|
||||
if cpos_attr_ids[i] == -1:
|
||||
continue
|
||||
lookups['cpos_lookup'][cpos][attr.name] = cpos_attr_ids[i]
|
||||
occured_attr_ids = [x for x in set(cpos_attr_ids) if x != -1]
|
||||
if len(occured_attr_ids) == 0:
|
||||
continue
|
||||
subattrs = corpus.structural_attributes.list(filters={'part_of': attr})
|
||||
if len(subattrs) == 0:
|
||||
continue
|
||||
lookup_name: str = f'{attr.name}_lookup'
|
||||
lookups[lookup_name] = {}
|
||||
for attr_id in occured_attr_ids:
|
||||
lookups[lookup_name][attr_id] = {}
|
||||
for subattr in subattrs:
|
||||
subattr_name = subattr.name[(len(attr.name) + 1):] # noqa
|
||||
for i, subattr_value in enumerate(subattr.values_by_ids(occured_attr_ids)): # noqa
|
||||
lookups[lookup_name][occured_attr_ids[i]][subattr_name] = subattr_value # noqa
|
||||
return lookups
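For orientation, this is roughly the shape of the dict that lookups_by_cpos returns. All attribute names (word, pos, text, author, ...) and values are invented here; the real keys depend on how the corpus was encoded.

example_lookups = {
    # One entry per requested corpus position: its positional attributes plus
    # the ids of the enclosing structural attributes (e.g. s, text).
    'cpos_lookup': {
        1042: {'word': 'nopaque', 'pos': 'NE', 'lemma': 'nopaque', 's': 7, 'text': 0},
        1043: {'word': '.', 'pos': '$.', 'lemma': '.', 's': 7, 'text': 0}
    },
    # One '<attr>_lookup' per structural attribute that has sub-attributes,
    # keyed by the ids seen in cpos_lookup; the parent prefix is stripped from
    # the sub-attribute names (text_author -> author).
    'text_lookup': {
        0: {'author': 'Jane Doe', 'title': 'Some Text', 'publishing_year': '1900'}
    }
}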
|
||||
|
||||
|
||||
def partial_export_subcorpus(
|
||||
subcorpus: CQiSubcorpus,
|
||||
match_id_list: List[int],
|
||||
context: int = 25
|
||||
) -> Dict:
|
||||
if subcorpus.size == 0:
|
||||
return {"matches": []}
|
||||
match_boundaries = []
|
||||
for match_id in match_id_list:
|
||||
if match_id < 0 or match_id >= subcorpus.size:
|
||||
continue
|
||||
match_boundaries.append(
|
||||
(
|
||||
match_id,
|
||||
subcorpus.dump(subcorpus.fields['match'], match_id, match_id)[0],
|
||||
subcorpus.dump(subcorpus.fields['matchend'], match_id, match_id)[0]
|
||||
)
|
||||
)
|
||||
cpos_set = set()
|
||||
matches = []
|
||||
for match_boundary in match_boundaries:
|
||||
match_num, match_start, match_end = match_boundary
|
||||
c = (match_start, match_end)
|
||||
if match_start == 0 or context == 0:
|
||||
lc = None
|
||||
cpos_list_lbound = match_start
|
||||
else:
|
||||
lc_lbound = max(0, (match_start - context))
|
||||
lc_rbound = match_start - 1
|
||||
lc = (lc_lbound, lc_rbound)
|
||||
cpos_list_lbound = lc_lbound
|
||||
if match_end == (subcorpus.collection.corpus.size - 1) or context == 0:
|
||||
rc = None
|
||||
cpos_list_rbound = match_end
|
||||
else:
|
||||
rc_lbound = match_end + 1
|
||||
rc_rbound = min(
|
||||
(match_end + context),
|
||||
(subcorpus.collection.corpus.size - 1)
|
||||
)
|
||||
rc = (rc_lbound, rc_rbound)
|
||||
cpos_list_rbound = rc_rbound
|
||||
match = {'num': match_num, 'lc': lc, 'c': c, 'rc': rc}
|
||||
matches.append(match)
|
||||
cpos_set.update(range(cpos_list_lbound, cpos_list_rbound + 1))
|
||||
lookups = lookups_by_cpos(subcorpus.collection.corpus, list(cpos_set))
|
||||
return {'matches': matches, **lookups}
|
||||
|
||||
|
||||
def export_subcorpus(
|
||||
subcorpus: CQiSubcorpus,
|
||||
context: int = 25,
|
||||
cutoff: float = float('inf'),
|
||||
offset: int = 0
|
||||
) -> Dict:
|
||||
if subcorpus.size == 0:
|
||||
return {"matches": []}
|
||||
first_match = max(0, offset)
|
||||
last_match = min((offset + cutoff - 1), (subcorpus.size - 1))
|
||||
match_boundaries = zip(
|
||||
range(first_match, last_match + 1),
|
||||
subcorpus.dump(subcorpus.fields['match'], first_match, last_match),
|
||||
subcorpus.dump(subcorpus.fields['matchend'], first_match, last_match)
|
||||
)
|
||||
cpos_set = set()
|
||||
matches = []
|
||||
for match_num, match_start, match_end in match_boundaries:
|
||||
c = (match_start, match_end)
|
||||
if match_start == 0 or context == 0:
|
||||
lc = None
|
||||
cpos_list_lbound = match_start
|
||||
else:
|
||||
lc_lbound = max(0, (match_start - context))
|
||||
lc_rbound = match_start - 1
|
||||
lc = (lc_lbound, lc_rbound)
|
||||
cpos_list_lbound = lc_lbound
|
||||
if match_end == (subcorpus.collection.corpus.size - 1) or context == 0:
|
||||
rc = None
|
||||
cpos_list_rbound = match_end
|
||||
else:
|
||||
rc_lbound = match_end + 1
|
||||
rc_rbound = min(
|
||||
(match_end + context),
|
||||
(subcorpus.collection.corpus.size - 1)
|
||||
)
|
||||
rc = (rc_lbound, rc_rbound)
|
||||
cpos_list_rbound = rc_rbound
|
||||
match = {'num': match_num, 'lc': lc, 'c': c, 'rc': rc}
|
||||
matches.append(match)
|
||||
cpos_set.update(range(cpos_list_lbound, cpos_list_rbound + 1))
|
||||
lookups = lookups_by_cpos(subcorpus.collection.corpus, list(cpos_set))
|
||||
return {'matches': matches, **lookups}
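A hedged usage sketch for the two exporters above. It assumes a CQiSubcorpus has already been created by the CQi-over-SocketIO session handling and that this code lives next to (or imports from) app/corpora/cqi_over_sio/utils.py; only the paging logic is being illustrated.

from typing import List

from cqi.models.subcorpora import Subcorpus as CQiSubcorpus


def iter_match_pages(subcorpus: CQiSubcorpus, page_size: int = 100, context: int = 25):
    # Walk over all matches of an existing subcorpus in fixed-size pages.
    offset = 0
    while offset < subcorpus.size:
        yield export_subcorpus(subcorpus, context=context, cutoff=page_size, offset=offset)
        offset += page_size


def export_selected(subcorpus: CQiSubcorpus, match_ids: List[int], context: int = 25):
    # Export only explicitly selected matches, e.g. for a user-curated excerpt.
    return partial_export_subcorpus(subcorpus, match_ids, context=context)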
|
@ -10,7 +10,7 @@ def corpus_follower_permission_required(*permissions):
|
||||
def decorated_function(*args, **kwargs):
|
||||
corpus_id = kwargs.get('corpus_id')
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if not (corpus.user == current_user or current_user.is_administrator):
|
||||
if not (corpus.user == current_user or current_user.is_administrator()):
|
||||
cfa = CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=current_user.id).first()
|
||||
if cfa is None:
|
||||
abort(403)
|
||||
@ -26,7 +26,7 @@ def corpus_owner_or_admin_required(f):
|
||||
def decorated_function(*args, **kwargs):
|
||||
corpus_id = kwargs.get('corpus_id')
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if not (corpus.user == current_user or current_user.is_administrator):
|
||||
if not (corpus.user == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return f(*args, **kwargs)
|
||||
return decorated_function
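A hedged sketch of how corpus_follower_permission_required is applied; the route, permission name and template are illustrative. The decorator only works on view functions whose URL rule provides corpus_id, because it reads kwargs.get('corpus_id').

from flask import render_template

from app.models import Corpus
from . import bp
from .decorators import corpus_follower_permission_required


@bp.route('/<hashid:corpus_id>/followers')
@corpus_follower_permission_required('MANAGE_FOLLOWERS')
def corpus_followers(corpus_id):
    # Reached only by the corpus owner, an administrator, or a follower whose
    # CorpusFollowerRole grants MANAGE_FOLLOWERS.
    corpus = Corpus.query.get_or_404(corpus_id)
    return render_template('corpora/followers.html.j2', corpus=corpus)  # template name assumed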
|
@ -15,7 +15,7 @@ def get_corpus(corpus_hashid):
|
||||
if not (
|
||||
corpus.is_public
|
||||
or corpus.user == current_user
|
||||
or current_user.is_administrator
|
||||
or current_user.is_administrator()
|
||||
):
|
||||
return {'options': {'status': 403, 'statusText': 'Forbidden'}}
|
||||
return {
|
||||
@ -38,7 +38,7 @@ def subscribe_corpus(corpus_hashid):
|
||||
if not (
|
||||
corpus.is_public
|
||||
or corpus.user == current_user
|
||||
or current_user.is_administrator
|
||||
or current_user.is_administrator()
|
||||
):
|
||||
return {'options': {'status': 403, 'statusText': 'Forbidden'}}
|
||||
join_room(f'/corpora/{corpus.hashid}')
|
@ -1,7 +1,7 @@
|
||||
from flask import current_app
|
||||
from flask import abort, current_app
|
||||
from threading import Thread
|
||||
from app.decorators import content_negotiation
|
||||
from app import db
|
||||
from app.decorators import content_negotiation
|
||||
from app.models import CorpusFile
|
||||
from ..decorators import corpus_follower_permission_required
|
||||
from . import bp
|
@ -6,19 +6,24 @@ from flask import (
|
||||
send_from_directory,
|
||||
url_for
|
||||
)
|
||||
from flask_breadcrumbs import register_breadcrumb
|
||||
from app import db
|
||||
from app.models import Corpus, CorpusFile, CorpusStatus
|
||||
from ..decorators import corpus_follower_permission_required
|
||||
from ..utils import corpus_endpoint_arguments_constructor as corpus_eac
|
||||
from . import bp
|
||||
from .forms import CreateCorpusFileForm, UpdateCorpusFileForm
|
||||
from .utils import corpus_file_dynamic_list_constructor as corpus_file_dlc
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/files')
|
||||
@register_breadcrumb(bp, '.entity.files', 'Files', endpoint_arguments_constructor=corpus_eac)
|
||||
def corpus_files(corpus_id):
|
||||
return redirect(url_for('.corpus', _anchor='files', corpus_id=corpus_id))
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/files/create', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.entity.files.create', 'Create', endpoint_arguments_constructor=corpus_eac)
|
||||
@corpus_follower_permission_required('MANAGE_FILES')
|
||||
def create_corpus_file(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
@ -60,6 +65,7 @@ def create_corpus_file(corpus_id):
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/files/<hashid:corpus_file_id>', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.entity.files.entity', '', dynamic_list_constructor=corpus_file_dlc)
|
||||
@corpus_follower_permission_required('MANAGE_FILES')
|
||||
def corpus_file(corpus_id, corpus_file_id):
|
||||
corpus_file = CorpusFile.query.filter_by(corpus_id=corpus_id, id=corpus_file_id).first_or_404()
|
||||
@ -88,6 +94,6 @@ def download_corpus_file(corpus_id, corpus_file_id):
|
||||
corpus_file.path.parent,
|
||||
corpus_file.path.name,
|
||||
as_attachment=True,
|
||||
download_name=corpus_file.filename,
|
||||
attachment_filename=corpus_file.filename,
|
||||
mimetype=corpus_file.mimetype
|
||||
)
|
app/corpora/files/utils.py (new file, 15 lines)
@ -0,0 +1,15 @@
from flask import request, url_for
|
||||
from app.models import CorpusFile
|
||||
from ..utils import corpus_endpoint_arguments_constructor as corpus_eac
|
||||
|
||||
|
||||
def corpus_file_dynamic_list_constructor():
|
||||
corpus_id = request.view_args['corpus_id']
|
||||
corpus_file_id = request.view_args['corpus_file_id']
|
||||
corpus_file = CorpusFile.query.filter_by(corpus_id=corpus_id, id=corpus_file_id).first_or_404()
|
||||
return [
|
||||
{
|
||||
'text': f'{corpus_file.author}: {corpus_file.title} ({corpus_file.publishing_year})',
|
||||
'url': url_for('.corpus_file', corpus_id=corpus_id, corpus_file_id=corpus_file_id)
|
||||
}
|
||||
]
|
@ -58,7 +58,7 @@ def delete_corpus_follower(corpus_id, follower_id):
|
||||
current_user.id == follower_id
|
||||
or current_user == cfa.corpus.user
|
||||
or CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=current_user.id).first().role.has_permission('MANAGE_FOLLOWERS')
|
||||
or current_user.is_administrator):
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
if current_user.id == follower_id:
|
||||
flash(f'You are no longer following "{cfa.corpus.title}"', 'corpus')
|
@ -1,4 +1,5 @@
|
||||
from flask import abort, flash, redirect, render_template, url_for
|
||||
from flask_breadcrumbs import register_breadcrumb
|
||||
from flask_login import current_user
|
||||
from app import db
|
||||
from app.models import (
|
||||
@ -10,14 +11,20 @@ from app.models import (
|
||||
from . import bp
|
||||
from .decorators import corpus_follower_permission_required
|
||||
from .forms import CreateCorpusForm
|
||||
from .utils import (
|
||||
corpus_endpoint_arguments_constructor as corpus_eac,
|
||||
corpus_dynamic_list_constructor as corpus_dlc
|
||||
)
|
||||
|
||||
|
||||
@bp.route('')
|
||||
@register_breadcrumb(bp, '.', '<i class="nopaque-icons left">I</i>My Corpora')
|
||||
def corpora():
|
||||
return redirect(url_for('main.dashboard', _anchor='corpora'))
|
||||
|
||||
|
||||
@bp.route('/create', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.create', 'Create')
|
||||
def create_corpus():
|
||||
form = CreateCorpusForm()
|
||||
if form.validate_on_submit():
|
||||
@ -40,6 +47,7 @@ def create_corpus():
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>')
|
||||
@register_breadcrumb(bp, '.entity', '', dynamic_list_constructor=corpus_dlc)
|
||||
def corpus(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
cfrs = CorpusFollowerRole.query.all()
|
||||
@ -47,13 +55,13 @@ def corpus(corpus_id):
|
||||
users = User.query.filter(User.is_public == True, User.id != current_user.id, User.id != corpus.user.id, User.role_id < 4).all()
|
||||
cfa = CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=current_user.id).first()
|
||||
if cfa is None:
|
||||
if corpus.user == current_user or current_user.is_administrator:
|
||||
if corpus.user == current_user or current_user.is_administrator():
|
||||
cfr = CorpusFollowerRole.query.filter_by(name='Administrator').first()
|
||||
else:
|
||||
cfr = CorpusFollowerRole.query.filter_by(name='Anonymous').first()
|
||||
else:
|
||||
cfr = cfa.role
|
||||
if corpus.user == current_user or current_user.is_administrator:
|
||||
if corpus.user == current_user or current_user.is_administrator():
|
||||
return render_template(
|
||||
'corpora/corpus.html.j2',
|
||||
title=corpus.title,
|
||||
@ -79,6 +87,7 @@ def corpus(corpus_id):
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/analysis')
|
||||
@corpus_follower_permission_required('VIEW')
|
||||
@register_breadcrumb(bp, '.entity.analysis', 'Analysis', endpoint_arguments_constructor=corpus_eac)
|
||||
def analysis(corpus_id):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
return render_template(
|
||||
@ -99,11 +108,13 @@ def follow_corpus(corpus_id, token):
|
||||
|
||||
|
||||
@bp.route('/import', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.import', 'Import')
|
||||
def import_corpus():
|
||||
abort(503)
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/export')
|
||||
@corpus_follower_permission_required('VIEW')
|
||||
@register_breadcrumb(bp, '.entity.export', 'Export', endpoint_arguments_constructor=corpus_eac)
|
||||
def export_corpus(corpus_id):
|
||||
abort(503)
|
app/corpora/utils.py (new file, 17 lines)
@ -0,0 +1,17 @@
from flask import request, url_for
|
||||
from app.models import Corpus
|
||||
|
||||
|
||||
def corpus_endpoint_arguments_constructor():
|
||||
return {'corpus_id': request.view_args['corpus_id']}
|
||||
|
||||
|
||||
def corpus_dynamic_list_constructor():
|
||||
corpus_id = request.view_args['corpus_id']
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
return [
|
||||
{
|
||||
'text': f'<i class="material-icons left">book</i>{corpus.title}',
|
||||
'url': url_for('.corpus', corpus_id=corpus_id)
|
||||
}
|
||||
]
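The two constructors above serve different Flask-Breadcrumbs hooks: corpus_endpoint_arguments_constructor supplies the url_for keyword arguments for statically named crumbs, while corpus_dynamic_list_constructor builds the whole crumb (text plus URL) for the corpus entity itself. Condensed from the routes shown elsewhere in this diff:

from flask_breadcrumbs import register_breadcrumb

from . import bp
from .utils import (
    corpus_endpoint_arguments_constructor as corpus_eac,
    corpus_dynamic_list_constructor as corpus_dlc
)


@bp.route('/<hashid:corpus_id>')
@register_breadcrumb(bp, '.entity', '', dynamic_list_constructor=corpus_dlc)
def corpus(corpus_id):
    ...


@bp.route('/<hashid:corpus_id>/analysis')
@register_breadcrumb(bp, '.entity.analysis', 'Analysis', endpoint_arguments_constructor=corpus_eac)
def analysis(corpus_id):
    ...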
|
app/daemon/__init__.py (new file, 11 lines)
@ -0,0 +1,11 @@
from app import db
|
||||
from flask import Flask
|
||||
from .corpus_utils import check_corpora
|
||||
from .job_utils import check_jobs
|
||||
|
||||
|
||||
def daemon(app: Flask):
|
||||
with app.app_context():
|
||||
check_corpora()
|
||||
check_jobs()
|
||||
db.session.commit()
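daemon() runs one maintenance pass (corpora first, then jobs) inside an app context pushed for the passed application object. A hedged sketch of one way to drive it periodically from a plain background thread; the application factory name, the 3-second interval and the threading approach are assumptions, not necessarily how nopaque itself schedules it:

from threading import Thread
from time import sleep

from app import create_app        # assumed application factory
from app.daemon import daemon


def run_daemon_forever(app, interval: float = 3.0):
    # daemon() pushes its own app context, so no context handling is needed here.
    while True:
        daemon(app)
        sleep(interval)


app = create_app()
Thread(target=run_daemon_forever, args=(app,), daemon=True).start()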
|
@ -1,4 +1,4 @@
|
||||
from app import db, docker_client, scheduler
|
||||
from app import docker_client
|
||||
from app.models import Corpus, CorpusStatus
|
||||
from flask import current_app
|
||||
import docker
|
||||
@ -6,11 +6,7 @@ import os
|
||||
import shutil
|
||||
|
||||
|
||||
def job():
|
||||
with scheduler.app.app_context():
|
||||
_handle_corpora()
|
||||
|
||||
def _handle_corpora():
|
||||
def check_corpora():
|
||||
corpora = Corpus.query.all()
|
||||
for corpus in [x for x in corpora if x.status == CorpusStatus.SUBMITTED]:
|
||||
_create_build_corpus_service(corpus)
|
||||
@ -26,7 +22,6 @@ def _handle_corpora():
|
||||
_create_cqpserver_container(corpus)
|
||||
for corpus in [x for x in corpora if x.status == CorpusStatus.CANCELING_ANALYSIS_SESSION]:
|
||||
_remove_cqpserver_container(corpus)
|
||||
db.session.commit()
|
||||
|
||||
def _create_build_corpus_service(corpus):
|
||||
''' # Docker service settings # '''
|
@ -1,4 +1,4 @@
|
||||
from app import db, docker_client, hashids, scheduler
|
||||
from app import db, docker_client, hashids
|
||||
from app.models import (
|
||||
Job,
|
||||
JobResult,
|
||||
@ -15,11 +15,7 @@ import os
|
||||
import shutil
|
||||
|
||||
|
||||
def job():
|
||||
with scheduler.app.app_context():
|
||||
_handle_jobs()
|
||||
|
||||
def _handle_jobs():
|
||||
def check_jobs():
|
||||
jobs = Job.query.all()
|
||||
for job in [x for x in jobs if x.status == JobStatus.SUBMITTED]:
|
||||
_create_job_service(job)
|
||||
@ -27,7 +23,6 @@ def _handle_jobs():
|
||||
_checkout_job_service(job)
|
||||
for job in [x for x in jobs if x.status == JobStatus.CANCELING]:
|
||||
_remove_job_service(job)
|
||||
db.session.commit()
|
||||
|
||||
def _create_job_service(job):
|
||||
''' # Docker service settings # '''
|
@ -1,7 +1,8 @@
|
||||
from flask import abort, request
|
||||
from flask import abort, current_app, request
|
||||
from flask_login import current_user
|
||||
from functools import wraps
|
||||
from typing import Optional
|
||||
from threading import Thread
|
||||
from typing import List, Union
|
||||
from werkzeug.exceptions import NotAcceptable
|
||||
from app.models import Permission
|
||||
|
||||
@ -23,21 +24,22 @@ def admin_required(f):
|
||||
|
||||
def socketio_login_required(f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
def decorated_function(*args, **kwargs):
|
||||
if current_user.is_authenticated:
|
||||
return f(*args, **kwargs)
|
||||
return {'code': 401, 'body': 'Unauthorized'}
|
||||
return wrapper
|
||||
else:
|
||||
return {'code': 401, 'msg': 'Unauthorized'}
|
||||
return decorated_function
|
||||
|
||||
|
||||
def socketio_permission_required(permission):
|
||||
def decorator(f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
def decorated_function(*args, **kwargs):
|
||||
if not current_user.can(permission):
|
||||
return {'code': 403, 'body': 'Forbidden'}
|
||||
return {'code': 403, 'msg': 'Forbidden'}
|
||||
return f(*args, **kwargs)
|
||||
return wrapper
|
||||
return decorated_function
|
||||
return decorator
|
||||
|
||||
|
||||
@ -45,9 +47,27 @@ def socketio_admin_required(f):
|
||||
return socketio_permission_required(Permission.ADMINISTRATE)(f)
|
||||
|
||||
|
||||
def background(f):
|
||||
'''
|
||||
' This decorator executes a function in a Thread.
|
||||
' Decorated functions need to be executed within a code block where an
|
||||
' app context exists.
|
||||
'
|
||||
' NOTE: An app object is passed as a keyword argument to the decorated
|
||||
' function.
|
||||
'''
|
||||
@wraps(f)
|
||||
def wrapped(*args, **kwargs):
|
||||
kwargs['app'] = current_app._get_current_object()
|
||||
thread = Thread(target=f, args=args, kwargs=kwargs)
|
||||
thread.start()
|
||||
return thread
|
||||
return wrapped
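A hedged usage sketch for the background decorator above. The decorated function receives the real application object as the app keyword argument and has to push its own app context; the function body here is invented.

from app import db
from app.models import CorpusFile


@background
def delete_corpus_file(corpus_file_id, *args, **kwargs):
    app = kwargs['app']            # injected by the decorator
    with app.app_context():
        corpus_file = CorpusFile.query.get(corpus_file_id)
        if corpus_file is not None:
            db.session.delete(corpus_file)
            db.session.commit()


# Call it where an app context exists, e.g. inside a request handler:
# thread = delete_corpus_file(corpus_file.id)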
|
||||
|
||||
|
||||
def content_negotiation(
|
||||
produces: Optional[str | list[str]] = None,
|
||||
consumes: Optional[str | list[str]] = None
|
||||
produces: Union[str, List[str], None] = None,
|
||||
consumes: Union[str, List[str], None] = None
|
||||
):
|
||||
def decorator(f):
|
||||
@wraps(f)
|
||||
|
app/email.py (31 changed lines)
@ -1,32 +1,25 @@
from flask import current_app, Flask, render_template
|
||||
from flask import current_app, render_template
|
||||
from flask_mail import Message
|
||||
from threading import Thread
|
||||
from app import mail
|
||||
|
||||
|
||||
def create_message(
|
||||
recipient: str,
|
||||
subject: str,
|
||||
template: str,
|
||||
**context
|
||||
) -> Message:
|
||||
message = Message(
|
||||
body=render_template(f'{template}.txt.j2', **context),
|
||||
html=render_template(f'{template}.html.j2', **context),
|
||||
def create_message(recipient, subject, template, **kwargs):
|
||||
subject_prefix: str = current_app.config['NOPAQUE_MAIL_SUBJECT_PREFIX']
|
||||
msg: Message = Message(
|
||||
body=render_template(f'{template}.txt.j2', **kwargs),
|
||||
html=render_template(f'{template}.html.j2', **kwargs),
|
||||
recipients=[recipient],
|
||||
subject=f'[nopaque] {subject}'
|
||||
subject=f'{subject_prefix} {subject}'
|
||||
)
|
||||
return message
|
||||
return msg
|
||||
|
||||
|
||||
def send(message: Message) -> Thread:
|
||||
def _send(app: Flask, message: Message):
|
||||
def send(msg, *args, **kwargs):
|
||||
def _send(app, msg):
|
||||
with app.app_context():
|
||||
mail.send(message)
|
||||
mail.send(msg)
|
||||
|
||||
thread = Thread(
|
||||
target=_send,
|
||||
args=[current_app._get_current_object(), message]
|
||||
)
|
||||
thread = Thread(target=_send, args=[current_app._get_current_object(), msg])
|
||||
thread.start()
|
||||
return thread
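A hedged usage sketch for the two helpers above. create_message renders <template>.txt.j2 and <template>.html.j2 with the given context and prepends the nopaque subject prefix; send fires the mail off in a thread so the request is not blocked. The recipient and template base name are made up.

# Inside a request handler (an app context is active); user and token were
# obtained earlier in the handler.
msg = create_message(
    'user@example.com',
    'Confirm Your Account',
    'auth/email/confirm',     # assumed template base name
    user=user,
    token=token
)
send(msg)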
|
||||
|
app/ext/flask_sqlalchemy/__init__.py (new file, 2 lines)
@ -0,0 +1,2 @@
from .container_column import ContainerColumn
|
||||
from .int_enum_column import IntEnumColumn
|
app/ext/flask_sqlalchemy/container_column.py (new file, 21 lines)
@ -0,0 +1,21 @@
import json
|
||||
from app import db
|
||||
|
||||
|
||||
class ContainerColumn(db.TypeDecorator):
|
||||
impl = db.String
|
||||
|
||||
def __init__(self, container_type, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.container_type = container_type
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
if isinstance(value, self.container_type):
|
||||
return json.dumps(value)
|
||||
elif isinstance(value, str) and isinstance(json.loads(value), self.container_type):
|
||||
return value
|
||||
else:
|
||||
return TypeError()
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
return json.loads(value)
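A hedged sketch of how ContainerColumn is used on a model: the value is stored as a JSON string and handed back as the original container type on load. The model, field name and string length are invented. (Note that process_bind_param above returns a TypeError instance instead of raising it for unsupported values, which looks like an oversight.)

from app import db
from app.ext.flask_sqlalchemy import ContainerColumn


class ExampleJob(db.Model):        # illustrative model, not part of nopaque
    __tablename__ = 'example_jobs'
    id = db.Column(db.Integer, primary_key=True)
    # Bound as '{"binarization": true, ...}', loaded back as a dict:
    service_args = db.Column(ContainerColumn(dict, 255), default=dict)


job = ExampleJob(service_args={'binarization': True, 'model': 'xyz'})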
|
@ -1,26 +1,6 @@
|
||||
import json
|
||||
from app import db
|
||||
|
||||
|
||||
class ContainerColumn(db.TypeDecorator):
|
||||
impl = db.String
|
||||
|
||||
def __init__(self, container_type, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.container_type = container_type
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
if isinstance(value, self.container_type):
|
||||
return json.dumps(value)
|
||||
elif isinstance(value, str) and isinstance(json.loads(value), self.container_type):
|
||||
return value
|
||||
else:
|
||||
return TypeError()
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
return json.loads(value)
|
||||
|
||||
|
||||
class IntEnumColumn(db.TypeDecorator):
|
||||
impl = db.Integer
|
||||
|
@ -1,2 +0,0 @@
|
||||
from .types import ContainerColumn
|
||||
from .types import IntEnumColumn
|
@ -1,2 +1,18 @@
|
||||
from .handle_corpora import job as handle_corpora
|
||||
from .handle_jobs import job as handle_jobs
|
||||
from flask import Blueprint
|
||||
from flask_login import login_required
|
||||
|
||||
|
||||
bp = Blueprint('jobs', __name__)
|
||||
|
||||
|
||||
@bp.before_request
|
||||
@login_required
|
||||
def before_request():
|
||||
'''
|
||||
Ensures that the routes in this package can only be visited by users that
|
||||
are logged in.
|
||||
'''
|
||||
pass
|
||||
|
||||
|
||||
from . import routes, json_routes
|
||||
|
@ -17,7 +17,7 @@ def delete_job(job_id):
|
||||
db.session.commit()
|
||||
|
||||
job = Job.query.get_or_404(job_id)
|
||||
if not (job.user == current_user or current_user.is_administrator):
|
||||
if not (job.user == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
thread = Thread(
|
||||
target=_delete_job,
|
||||
@ -56,7 +56,7 @@ def restart_job(job_id):
|
||||
db.session.commit()
|
||||
|
||||
job = Job.query.get_or_404(job_id)
|
||||
if not (job.user == current_user or current_user.is_administrator):
|
||||
if not (job.user == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
if job.status == JobStatus.FAILED:
|
||||
response = {'errors': {'message': 'Job status is not "failed"'}}
|
@ -5,20 +5,24 @@ from flask import (
|
||||
send_from_directory,
|
||||
url_for
|
||||
)
|
||||
from flask_breadcrumbs import register_breadcrumb
|
||||
from flask_login import current_user
|
||||
from app.models import Job, JobInput, JobResult
|
||||
from . import bp
|
||||
from .utils import job_dynamic_list_constructor as job_dlc
|
||||
|
||||
|
||||
@bp.route('')
|
||||
def jobs():
|
||||
@register_breadcrumb(bp, '.', '<i class="nopaque-icons left">J</i>My Jobs')
|
||||
def corpora():
|
||||
return redirect(url_for('main.dashboard', _anchor='jobs'))
|
||||
|
||||
|
||||
@bp.route('/<hashid:job_id>')
|
||||
@register_breadcrumb(bp, '.entity', '', dynamic_list_constructor=job_dlc)
|
||||
def job(job_id):
|
||||
job = Job.query.get_or_404(job_id)
|
||||
if not (job.user == current_user or current_user.is_administrator):
|
||||
if not (job.user == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return render_template(
|
||||
'jobs/job.html.j2',
|
||||
@ -30,13 +34,13 @@ def job(job_id):
|
||||
@bp.route('/<hashid:job_id>/inputs/<hashid:job_input_id>/download')
|
||||
def download_job_input(job_id, job_input_id):
|
||||
job_input = JobInput.query.filter_by(job_id=job_id, id=job_input_id).first_or_404()
|
||||
if not (job_input.job.user == current_user or current_user.is_administrator):
|
||||
if not (job_input.job.user == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return send_from_directory(
|
||||
job_input.path.parent,
|
||||
job_input.path.name,
|
||||
as_attachment=True,
|
||||
download_name=job_input.filename,
|
||||
attachment_filename=job_input.filename,
|
||||
mimetype=job_input.mimetype
|
||||
)
|
||||
|
||||
@ -44,12 +48,12 @@ def download_job_input(job_id, job_input_id):
|
||||
@bp.route('/<hashid:job_id>/results/<hashid:job_result_id>/download')
|
||||
def download_job_result(job_id, job_result_id):
|
||||
job_result = JobResult.query.filter_by(job_id=job_id, id=job_result_id).first_or_404()
|
||||
if not (job_result.job.user == current_user or current_user.is_administrator):
|
||||
if not (job_result.job.user == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return send_from_directory(
|
||||
job_result.path.parent,
|
||||
job_result.path.name,
|
||||
as_attachment=True,
|
||||
download_name=job_result.filename,
|
||||
attachment_filename=job_result.filename,
|
||||
mimetype=job_result.mimetype
|
||||
)
|
app/jobs/utils.py (new file, 13 lines)
@ -0,0 +1,13 @@
from flask import request, url_for
|
||||
from app.models import Job
|
||||
|
||||
|
||||
def job_dynamic_list_constructor():
|
||||
job_id = request.view_args['job_id']
|
||||
job = Job.query.get_or_404(job_id)
|
||||
return [
|
||||
{
|
||||
'text': f'<i class="nopaque-icons left service-icons" data-service="{job.service}"></i>{job.title}',
|
||||
'url': url_for('.job', job_id=job_id)
|
||||
}
|
||||
]
|
@ -1,9 +1,8 @@
|
||||
from flask import current_app
|
||||
from flask_migrate import upgrade
|
||||
from pathlib import Path
|
||||
from app import db
|
||||
from typing import List
|
||||
from app.models import (
|
||||
Corpus,
|
||||
CorpusFollowerRole,
|
||||
Role,
|
||||
SpaCyNLPPipelineModel,
|
||||
@ -16,10 +15,10 @@ from . import bp
|
||||
@bp.cli.command('deploy')
|
||||
def deploy():
|
||||
''' Run deployment tasks. '''
|
||||
|
||||
# Make default directories
|
||||
print('Make default directories')
|
||||
base_dir = current_app.config['NOPAQUE_DATA_DIR']
|
||||
default_dirs: list[Path] = [
|
||||
default_dirs: List[Path] = [
|
||||
base_dir / 'tmp',
|
||||
base_dir / 'users'
|
||||
]
|
||||
@ -29,9 +28,11 @@ def deploy():
|
||||
if not default_dir.is_dir():
|
||||
raise NotADirectoryError(f'{default_dir} is not a directory')
|
||||
|
||||
# migrate database to latest revision
|
||||
print('Migrate database to latest revision')
|
||||
upgrade()
|
||||
|
||||
# Insert/Update default database values
|
||||
print('Insert/Update default Roles')
|
||||
Role.insert_defaults()
|
||||
print('Insert/Update default Users')
|
||||
@ -43,9 +44,4 @@ def deploy():
|
||||
print('Insert/Update default TesseractOCRPipelineModels')
|
||||
TesseractOCRPipelineModel.insert_defaults()
|
||||
|
||||
print('Stop running analysis sessions')
|
||||
for corpus in Corpus.query.filter(Corpus.num_analysis_sessions > 0).all():
|
||||
corpus.num_analysis_sessions = 0
|
||||
db.session.commit()
|
||||
|
||||
# TODO: Implement checks for if the nopaque network exists
|
@ -1,11 +1,14 @@
|
||||
from flask import flash, redirect, render_template, url_for
|
||||
from flask_breadcrumbs import register_breadcrumb
|
||||
from flask_login import current_user, login_required, login_user
|
||||
from app.blueprints.auth.forms import LoginForm
|
||||
from app.auth.forms import LoginForm
|
||||
from app.models import Corpus, User
|
||||
from sqlalchemy import or_
|
||||
from . import bp
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.', '<i class="material-icons">home</i>')
|
||||
def index():
|
||||
form = LoginForm()
|
||||
if form.validate_on_submit():
|
||||
@ -24,6 +27,7 @@ def index():
|
||||
|
||||
|
||||
@bp.route('/faq')
|
||||
@register_breadcrumb(bp, '.faq', 'Frequently Asked Questions')
|
||||
def faq():
|
||||
return render_template(
|
||||
'main/faq.html.j2',
|
||||
@ -32,6 +36,7 @@ def faq():
|
||||
|
||||
|
||||
@bp.route('/dashboard')
|
||||
@register_breadcrumb(bp, '.dashboard', '<i class="material-icons left">dashboard</i>Dashboard')
|
||||
@login_required
|
||||
def dashboard():
|
||||
return render_template(
|
||||
@ -40,15 +45,8 @@ def dashboard():
|
||||
)
|
||||
|
||||
|
||||
@bp.route('/manual')
|
||||
def manual():
|
||||
return render_template(
|
||||
'main/manual.html.j2',
|
||||
title='Manual'
|
||||
)
|
||||
|
||||
|
||||
@bp.route('/news')
|
||||
@register_breadcrumb(bp, '.news', '<i class="material-icons left">email</i>News')
|
||||
def news():
|
||||
return render_template(
|
||||
'main/news.html.j2',
|
||||
@ -57,6 +55,7 @@ def news():
|
||||
|
||||
|
||||
@bp.route('/privacy_policy')
|
||||
@register_breadcrumb(bp, '.privacy_policy', 'Private statement (GDPR)')
|
||||
def privacy_policy():
|
||||
return render_template(
|
||||
'main/privacy_policy.html.j2',
|
||||
@ -65,6 +64,7 @@ def privacy_policy():
|
||||
|
||||
|
||||
@bp.route('/terms_of_use')
|
||||
@register_breadcrumb(bp, '.terms_of_use', 'Terms of Use')
|
||||
def terms_of_use():
|
||||
return render_template(
|
||||
'main/terms_of_use.html.j2',
|
||||
@ -73,6 +73,7 @@ def terms_of_use():
|
||||
|
||||
|
||||
@bp.route('/social-area')
|
||||
@register_breadcrumb(bp, '.social_area', '<i class="material-icons left">group</i>Social Area')
|
||||
@login_required
|
||||
def social_area():
|
||||
print('test')
|
@ -1,4 +1,3 @@
|
||||
from .anonymous_user import *
|
||||
from .avatar import *
|
||||
from .corpus_file import *
|
||||
from .corpus_follower_association import *
|
||||
@ -12,3 +11,9 @@ from .spacy_nlp_pipeline_model import *
|
||||
from .tesseract_ocr_pipeline_model import *
|
||||
from .token import *
|
||||
from .user import *
|
||||
from app import login
|
||||
|
||||
|
||||
@login.user_loader
|
||||
def load_user(user_id):
|
||||
return User.query.get(int(user_id))
|
||||
|
@ -1,10 +0,0 @@
|
||||
from flask_login import AnonymousUserMixin
|
||||
|
||||
|
||||
class AnonymousUser(AnonymousUserMixin):
|
||||
def can(self, permissions):
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_administrator(self):
|
||||
return False
|
@ -3,12 +3,13 @@ from enum import IntEnum
|
||||
from flask import current_app, url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from typing import Union
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import xml.etree.ElementTree as ET
|
||||
from app import db
|
||||
from app.converters.vrt import normalize_vrt_file
|
||||
from app.extensions.sqlalchemy_extras import IntEnumColumn
|
||||
from app.ext.flask_sqlalchemy import IntEnumColumn
|
||||
from .corpus_follower_association import CorpusFollowerAssociation
|
||||
|
||||
|
||||
@ -24,7 +25,7 @@ class CorpusStatus(IntEnum):
|
||||
CANCELING_ANALYSIS_SESSION = 9
|
||||
|
||||
@staticmethod
|
||||
def get(corpus_status: 'CorpusStatus | int | str') -> 'CorpusStatus':
|
||||
def get(corpus_status: Union['CorpusStatus', int, str]) -> 'CorpusStatus':
|
||||
if isinstance(corpus_status, CorpusStatus):
|
||||
return corpus_status
|
||||
if isinstance(corpus_status, int):
|
||||
|
@ -1,5 +1,6 @@
|
||||
from flask_hashids import HashidMixin
|
||||
from enum import IntEnum
|
||||
from typing import Union
|
||||
from app import db
|
||||
|
||||
|
||||
@ -10,7 +11,7 @@ class CorpusFollowerPermission(IntEnum):
|
||||
MANAGE_CORPUS = 8
|
||||
|
||||
@staticmethod
|
||||
def get(corpus_follower_permission: 'CorpusFollowerPermission | int | str') -> 'CorpusFollowerPermission':
|
||||
def get(corpus_follower_permission: Union['CorpusFollowerPermission', int, str]) -> 'CorpusFollowerPermission':
|
||||
if isinstance(corpus_follower_permission, CorpusFollowerPermission):
|
||||
return corpus_follower_permission
|
||||
if isinstance(corpus_follower_permission, int):
|
||||
@ -37,16 +38,16 @@ class CorpusFollowerRole(HashidMixin, db.Model):
|
||||
def __repr__(self):
|
||||
return f'<CorpusFollowerRole {self.name}>'
|
||||
|
||||
def has_permission(self, permission: CorpusFollowerPermission | int | str):
|
||||
def has_permission(self, permission: Union[CorpusFollowerPermission, int, str]):
|
||||
perm = CorpusFollowerPermission.get(permission)
|
||||
return self.permissions & perm.value == perm.value
|
||||
|
||||
def add_permission(self, permission: CorpusFollowerPermission | int | str):
|
||||
def add_permission(self, permission: Union[CorpusFollowerPermission, int, str]):
|
||||
perm = CorpusFollowerPermission.get(permission)
|
||||
if not self.has_permission(perm):
|
||||
self.permissions += perm.value
|
||||
|
||||
def remove_permission(self, permission: CorpusFollowerPermission | int | str):
|
||||
def remove_permission(self, permission: Union[CorpusFollowerPermission, int, str]):
|
||||
perm = CorpusFollowerPermission.get(permission)
|
||||
if self.has_permission(perm):
|
||||
self.permissions -= perm.value
|
||||
|
@ -3,10 +3,11 @@ from enum import IntEnum
|
||||
from flask import current_app, url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
from app import db
|
||||
from app.extensions.sqlalchemy_extras import ContainerColumn, IntEnumColumn
|
||||
from app.ext.flask_sqlalchemy import ContainerColumn, IntEnumColumn
|
||||
|
||||
|
||||
class JobStatus(IntEnum):
|
||||
@ -20,7 +21,7 @@ class JobStatus(IntEnum):
|
||||
FAILED = 8
|
||||
|
||||
@staticmethod
|
||||
def get(job_status: 'JobStatus | int | str') -> 'JobStatus':
|
||||
def get(job_status: Union['JobStatus', int, str]) -> 'JobStatus':
|
||||
if isinstance(job_status, JobStatus):
|
||||
return job_status
|
||||
if isinstance(job_status, int):
|
||||
|
@ -1,5 +1,6 @@
|
||||
from enum import IntEnum
|
||||
from flask_hashids import HashidMixin
|
||||
from typing import Union
|
||||
from app import db
|
||||
|
||||
|
||||
@ -13,7 +14,7 @@ class Permission(IntEnum):
|
||||
USE_API = 4
|
||||
|
||||
@staticmethod
|
||||
def get(permission: 'Permission | int | str') -> 'Permission':
|
||||
def get(permission: Union['Permission', int, str]) -> 'Permission':
|
||||
if isinstance(permission, Permission):
|
||||
return permission
|
||||
if isinstance(permission, int):
|
||||
@ -37,16 +38,16 @@ class Role(HashidMixin, db.Model):
|
||||
def __repr__(self):
|
||||
return f'<Role {self.name}>'
|
||||
|
||||
def has_permission(self, permission: Permission | int | str):
|
||||
def has_permission(self, permission: Union[Permission, int, str]):
|
||||
p = Permission.get(permission)
|
||||
return self.permissions & p.value == p.value
|
||||
|
||||
def add_permission(self, permission: Permission | int | str):
|
||||
def add_permission(self, permission: Union[Permission, int, str]):
|
||||
p = Permission.get(permission)
|
||||
if not self.has_permission(p):
|
||||
self.permissions += p.value
|
||||
|
||||
def remove_permission(self, permission: Permission | int | str):
|
||||
def remove_permission(self, permission: Union[Permission, int, str]):
|
||||
p = Permission.get(permission)
|
||||
if self.has_permission(p):
|
||||
self.permissions -= p.value
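The permission methods above are plain bit arithmetic on the permissions integer. A small self-contained illustration; USE_API = 4 matches the enum above, the other two values are assumed power-of-two flags:

ADMINISTRATE, CONTRIBUTE, USE_API = 1, 2, 4

permissions = 0
permissions += CONTRIBUTE                              # add_permission
permissions += USE_API                                 # add_permission

print((permissions & CONTRIBUTE) == CONTRIBUTE)        # True  -> has_permission
print((permissions & ADMINISTRATE) == ADMINISTRATE)    # False

permissions -= USE_API                                 # remove_permission
print((permissions & USE_API) == USE_API)              # False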
|
||||
|
@ -5,7 +5,7 @@ from pathlib import Path
|
||||
import requests
|
||||
import yaml
|
||||
from app import db
|
||||
from app.extensions.sqlalchemy_extras import ContainerColumn
|
||||
from app.ext.flask_sqlalchemy import ContainerColumn
|
||||
from .file_mixin import FileMixin
|
||||
from .user import User
|
||||
|
||||
|
@ -5,7 +5,7 @@ from pathlib import Path
|
||||
import requests
|
||||
import yaml
|
||||
from app import db
|
||||
from app.extensions.sqlalchemy_extras import ContainerColumn
|
||||
from app.ext.flask_sqlalchemy import ContainerColumn
|
||||
from .file_mixin import FileMixin
|
||||
from .user import User
|
||||
|
||||
|
@ -5,13 +5,14 @@ from flask_hashids import HashidMixin
|
||||
from flask_login import UserMixin
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from pathlib import Path
|
||||
from typing import Union
|
||||
from werkzeug.security import generate_password_hash, check_password_hash
|
||||
import jwt
|
||||
import re
|
||||
import secrets
|
||||
import shutil
|
||||
from app import db, hashids
|
||||
from app.extensions.sqlalchemy_extras import IntEnumColumn
|
||||
from app.ext.flask_sqlalchemy import IntEnumColumn
|
||||
from .corpus import Corpus
|
||||
from .corpus_follower_association import CorpusFollowerAssociation
|
||||
from .corpus_follower_role import CorpusFollowerRole
|
||||
@ -25,7 +26,7 @@ class ProfilePrivacySettings(IntEnum):
|
||||
SHOW_MEMBER_SINCE = 4
|
||||
|
||||
@staticmethod
|
||||
def get(profile_privacy_setting: 'ProfilePrivacySettings | int | str') -> 'ProfilePrivacySettings':
|
||||
def get(profile_privacy_setting: Union['ProfilePrivacySettings', int, str]) -> 'ProfilePrivacySettings':
|
||||
if isinstance(profile_privacy_setting, ProfilePrivacySettings):
|
||||
return profile_privacy_setting
|
||||
if isinstance(profile_privacy_setting, int):
|
||||
@ -131,10 +132,6 @@ class User(HashidMixin, UserMixin, db.Model):
|
||||
def __repr__(self):
|
||||
return f'<User {self.username}>'
|
||||
|
||||
@property
|
||||
def is_administrator(self):
|
||||
return self.can(Permission.ADMINISTRATE)
|
||||
|
||||
@property
|
||||
def jsonpatch_path(self):
|
||||
return f'/users/{self.hashid}'
|
||||
@ -145,8 +142,7 @@ class User(HashidMixin, UserMixin, db.Model):
|
||||
|
||||
@password.setter
|
||||
def password(self, password):
|
||||
#pbkdf2:sha256
|
||||
self.password_hash = generate_password_hash(password, method='pbkdf2')
|
||||
self.password_hash = generate_password_hash(password)
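Both variants of the setter produce a salted hash that check_password_hash can verify; method='pbkdf2' is the shorthand accepted by recent Werkzeug releases (it expands to pbkdf2:sha256), while omitting it falls back to Werkzeug's default method, which varies by version. A minimal standalone round trip:

from werkzeug.security import check_password_hash, generate_password_hash

pw_hash = generate_password_hash('correct horse battery staple', method='pbkdf2')
print(pw_hash.startswith('pbkdf2:'))                                  # True
print(check_password_hash(pw_hash, 'correct horse battery staple'))   # True
print(check_password_hash(pw_hash, 'wrong password'))                 # False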
|
||||
|
||||
@property
|
||||
def path(self) -> Path:
|
||||
@ -298,6 +294,9 @@ class User(HashidMixin, UserMixin, db.Model):
|
||||
algorithm='HS256'
|
||||
)
|
||||
|
||||
def is_administrator(self):
|
||||
return self.can(Permission.ADMINISTRATE)
|
||||
|
||||
def ping(self):
|
||||
self.last_seen = datetime.utcnow()
|
||||
|
||||
|
@ -1,37 +0,0 @@
|
||||
from cqi import CQiClient
|
||||
from threading import Lock
|
||||
from flask import session
|
||||
|
||||
|
||||
class CQiOverSocketIOSessionManager:
|
||||
@staticmethod
|
||||
def setup():
|
||||
session['cqi_over_sio'] = {}
|
||||
|
||||
@staticmethod
|
||||
def teardown():
|
||||
session.pop('cqi_over_sio')
|
||||
|
||||
@staticmethod
|
||||
def set_corpus_id(corpus_id: int):
|
||||
session['cqi_over_sio']['corpus_id'] = corpus_id
|
||||
|
||||
@staticmethod
|
||||
def get_corpus_id() -> int:
|
||||
return session['cqi_over_sio']['corpus_id']
|
||||
|
||||
@staticmethod
|
||||
def set_cqi_client(cqi_client: CQiClient):
|
||||
session['cqi_over_sio']['cqi_client'] = cqi_client
|
||||
|
||||
@staticmethod
|
||||
def get_cqi_client() -> CQiClient:
|
||||
return session['cqi_over_sio']['cqi_client']
|
||||
|
||||
@staticmethod
|
||||
def set_cqi_client_lock(cqi_client_lock: Lock):
|
||||
session['cqi_over_sio']['cqi_client_lock'] = cqi_client_lock
|
||||
|
||||
@staticmethod
|
||||
def get_cqi_client_lock() -> Lock:
|
||||
return session['cqi_over_sio']['cqi_client_lock']
|
@ -1,78 +0,0 @@
|
||||
from flask_login import current_user
|
||||
from flask_socketio import join_room, leave_room, Namespace
|
||||
from app import hashids
|
||||
from app.decorators import socketio_login_required
|
||||
from app.models import User
|
||||
|
||||
|
||||
class UsersNamespace(Namespace):
|
||||
@socketio_login_required
|
||||
def on_get_user(self, user_hashid: str) -> dict:
|
||||
user_id = hashids.decode(user_hashid)
|
||||
|
||||
if not isinstance(user_id, int):
|
||||
return {'code': 400, 'msg': 'Bad Request'}
|
||||
|
||||
user = User.query.get(user_id)
|
||||
|
||||
if user is None:
|
||||
return {'status': 404, 'statusText': 'Not found'}
|
||||
|
||||
if not (
|
||||
user == current_user
|
||||
or current_user.is_administrator
|
||||
):
|
||||
return {'status': 403, 'statusText': 'Forbidden'}
|
||||
|
||||
return {
|
||||
'body': user.to_json_serializeable(
|
||||
backrefs=True,
|
||||
relationships=True
|
||||
),
|
||||
'status': 200,
|
||||
'statusText': 'OK'
|
||||
}
|
||||
|
||||
@socketio_login_required
|
||||
def on_subscribe_user(self, user_hashid: str) -> dict:
|
||||
user_id = hashids.decode(user_hashid)
|
||||
|
||||
if not isinstance(user_id, int):
|
||||
return {'code': 400, 'msg': 'Bad Request'}
|
||||
|
||||
user = User.query.get(user_id)
|
||||
|
||||
if user is None:
|
||||
return {'status': 404, 'statusText': 'Not found'}
|
||||
|
||||
if not (
|
||||
user == current_user
|
||||
or current_user.is_administrator
|
||||
):
|
||||
return {'status': 403, 'statusText': 'Forbidden'}
|
||||
|
||||
join_room(f'/users/{user.hashid}')
|
||||
|
||||
return {'status': 200, 'statusText': 'OK'}
|
||||
|
||||
@socketio_login_required
|
||||
def on_unsubscribe_user(self, user_hashid: str) -> dict:
|
||||
user_id = hashids.decode(user_hashid)
|
||||
|
||||
if not isinstance(user_id, int):
|
||||
return {'code': 400, 'msg': 'Bad Request'}
|
||||
|
||||
user = User.query.get(user_id)
|
||||
|
||||
if user is None:
|
||||
return {'status': 404, 'statusText': 'Not found'}
|
||||
|
||||
if not (
|
||||
user == current_user
|
||||
or current_user.is_administrator
|
||||
):
|
||||
return {'status': 403, 'statusText': 'Forbidden'}
|
||||
|
||||
leave_room(f'/users/{user.hashid}')
|
||||
|
||||
return {'status': 200, 'statusText': 'OK'}
|
@ -1,10 +1,12 @@
|
||||
from flask import abort, current_app, flash, redirect, render_template, request, url_for
|
||||
from flask import abort, current_app, flash, Markup, redirect, render_template, request, url_for
|
||||
from flask_breadcrumbs import register_breadcrumb
|
||||
from flask_login import current_user
|
||||
import requests
|
||||
from app import db, hashids
|
||||
from app.models import (
|
||||
Job,
|
||||
JobInput,
|
||||
JobResult,
|
||||
JobStatus,
|
||||
TesseractOCRPipelineModel,
|
||||
SpaCyNLPPipelineModel
|
||||
@ -19,11 +21,13 @@ from .forms import (
|
||||
|
||||
|
||||
@bp.route('/services')
|
||||
@register_breadcrumb(bp, '.', 'Services')
|
||||
def services():
|
||||
return redirect(url_for('main.dashboard'))
|
||||
|
||||
|
||||
@bp.route('/file-setup-pipeline', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.file_setup_pipeline', '<i class="nopaque-icons service-icons left" data-service="file-setup-pipeline"></i>File Setup')
|
||||
def file_setup_pipeline():
|
||||
service = 'file-setup-pipeline'
|
||||
service_manifest = SERVICES[service]
|
||||
@ -53,7 +57,7 @@ def file_setup_pipeline():
|
||||
abort(500)
|
||||
job.status = JobStatus.SUBMITTED
|
||||
db.session.commit()
|
||||
message = f'Job "<a href="{job.url}">{job.title}</a>" created'
|
||||
message = Markup(f'Job "<a href="{job.url}">{job.title}</a>" created')
|
||||
flash(message, 'job')
|
||||
return {}, 201, {'Location': job.url}
|
||||
return render_template(
|
||||
@ -64,12 +68,15 @@ def file_setup_pipeline():
|
||||
|
||||
|
||||
@bp.route('/tesseract-ocr-pipeline', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.tesseract_ocr_pipeline', '<i class="nopaque-icons service-icons left" data-service="tesseract-ocr-pipeline"></i>Tesseract OCR Pipeline')
|
||||
def tesseract_ocr_pipeline():
|
||||
service_name = 'tesseract-ocr-pipeline'
|
||||
service_manifest = SERVICES[service_name]
|
||||
version = request.args.get('version', service_manifest['latest_version'])
|
||||
if version not in service_manifest['versions']:
|
||||
abort(404)
|
||||
job_results = JobResult.query.all()
|
||||
choosable_job_ids = [job_result.job.hashid for job_result in job_results if job_result.job.service == "file-setup-pipeline" and job_result.filename.endswith('.pdf')]
|
||||
form = CreateTesseractOCRPipelineJobForm(prefix='create-job-form', version=version)
|
||||
if form.is_submitted():
|
||||
if not form.validate():
|
||||
@ -96,7 +103,7 @@ def tesseract_ocr_pipeline():
|
||||
abort(500)
|
||||
job.status = JobStatus.SUBMITTED
|
||||
db.session.commit()
|
||||
message = f'Job "<a href="{job.url}">{job.title}</a>" created'
|
||||
message = Markup(f'Job "<a href="{job.url}">{job.title}</a>" created')
|
||||
flash(message, 'job')
|
||||
return {}, 201, {'Location': job.url}
|
||||
tesseract_ocr_pipeline_models = [
|
||||
@ -107,6 +114,7 @@ def tesseract_ocr_pipeline():
|
||||
return render_template(
|
||||
'services/tesseract_ocr_pipeline.html.j2',
|
||||
title=service_manifest['name'],
|
||||
choosable_job_ids=choosable_job_ids,
|
||||
form=form,
|
||||
tesseract_ocr_pipeline_models=tesseract_ocr_pipeline_models,
|
||||
user_tesseract_ocr_pipeline_models_count=user_tesseract_ocr_pipeline_models_count
|
||||
@ -114,6 +122,7 @@ def tesseract_ocr_pipeline():
|
||||
|
||||
|
||||
@bp.route('/transkribus-htr-pipeline', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.transkribus_htr_pipeline', '<i class="nopaque-icons service-icons left" data-service="transkribus-htr-pipeline"></i>Transkribus HTR Pipeline')
|
||||
def transkribus_htr_pipeline():
|
||||
if not current_app.config.get('NOPAQUE_TRANSKRIBUS_ENABLED'):
|
||||
abort(404)
|
||||
@ -159,7 +168,7 @@ def transkribus_htr_pipeline():
|
||||
abort(500)
|
||||
job.status = JobStatus.SUBMITTED
|
||||
db.session.commit()
|
||||
message = f'Job "<a href="{job.url}">{job.title}</a>" created'
|
||||
message = Markup(f'Job "<a href="{job.url}">{job.title}</a>" created')
|
||||
flash(message, 'job')
|
||||
return {}, 201, {'Location': job.url}
|
||||
return render_template(
|
||||
@ -171,6 +180,7 @@ def transkribus_htr_pipeline():
|
||||
|
||||
|
||||
@bp.route('/spacy-nlp-pipeline', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.spacy_nlp_pipeline', '<i class="nopaque-icons service-icons left" data-service="spacy-nlp-pipeline"></i>SpaCy NLP Pipeline')
|
||||
def spacy_nlp_pipeline():
|
||||
service = 'spacy-nlp-pipeline'
|
||||
service_manifest = SERVICES[service]
|
||||
@ -204,7 +214,7 @@ def spacy_nlp_pipeline():
|
||||
abort(500)
|
||||
job.status = JobStatus.SUBMITTED
|
||||
db.session.commit()
|
||||
message = f'Job "<a href="{job.url}">{job.title}</a>" created'
|
||||
message = Markup(f'Job "<a href="{job.url}">{job.title}</a>" created')
|
||||
flash(message, 'job')
|
||||
return {}, 201, {'Location': job.url}
|
||||
return render_template(
|
||||
@ -217,6 +227,7 @@ def spacy_nlp_pipeline():
|
||||
|
||||
|
||||
@bp.route('/corpus-analysis')
|
||||
@register_breadcrumb(bp, '.corpus_analysis', '<i class="nopaque-icons service-icons left" data-service="corpus-analysis"></i>Corpus Analysis')
|
||||
def corpus_analysis():
|
||||
return render_template(
|
||||
'services/corpus_analysis.html.j2',
|
@ -1,10 +1,12 @@
|
||||
from flask import g, url_for
|
||||
from flask_breadcrumbs import register_breadcrumb
|
||||
from flask_login import current_user
|
||||
from app.blueprints.users.settings.routes import settings as settings_route
|
||||
from app.users.settings.routes import settings as settings_route
|
||||
from . import bp
|
||||
|
||||
|
||||
@bp.route('/settings', methods=['GET', 'POST'])
|
||||
@register_breadcrumb(bp, '.', '<i class="material-icons left">settings</i>Settings')
|
||||
def settings():
|
||||
g._nopaque_redirect_location_on_post = url_for('.settings')
|
||||
return settings_route(current_user.id)
|
app/static/css/colors.scss (new file, 290 lines)
@ -0,0 +1,290 @@
/// Map deep get
|
||||
/// @author Kitty Giraudel
|
||||
/// @access public
|
||||
/// @param {Map} $map - Map
|
||||
/// @param {Arglist} $keys - Key chain
|
||||
/// @return {*} - Desired value
|
||||
@function map-deep-get($map, $keys...) {
|
||||
@each $key in $keys {
|
||||
$map: map-get($map, $key);
|
||||
}
|
||||
@return $map;
|
||||
}
|
||||
|
||||
|
||||
$color: (
|
||||
"baseline": (
|
||||
"primary": #00426f,
|
||||
"primary-variant": #1a5c89,
|
||||
"secondary": #00426f,
|
||||
"secondary-variant": #1a5c89,
|
||||
"background": #ffffff,
|
||||
"surface": #ffffff,
|
||||
"error": #b00020
|
||||
),
|
||||
"social-area": (
|
||||
"base": #d6ae86,
|
||||
"darken": #C98536,
|
||||
"lighten": #EAE2DB
|
||||
),
|
||||
"service": (
|
||||
"corpus-analysis": (
|
||||
"base": #aa9cc9,
|
||||
"darken": #6b3f89,
|
||||
"lighten": #ebe8f6
|
||||
),
|
||||
"file-setup-pipeline": (
|
||||
"base": #d5dc95,
|
||||
"darken": #a1b300,
|
||||
"lighten": #f2f3e1
|
||||
),
|
||||
"spacy-nlp-pipeline": (
|
||||
"base": #98acd2,
|
||||
"darken": #0064a3,
|
||||
"lighten": #e5e8f5
|
||||
),
|
||||
"tesseract-ocr-pipeline": (
|
||||
"base": #a9d8c8,
|
||||
"darken": #00a58b,
|
||||
"lighten": #e7f4f1
|
||||
),
|
||||
"transkribus-htr-pipeline": (
|
||||
"base": #607d8b,
|
||||
"darken": #37474f,
|
||||
"lighten": #cfd8dc
|
||||
)
|
||||
),
|
||||
"status": (
|
||||
"corpus": (
|
||||
"UNPREPARED": #9e9e9e,
|
||||
"QUEUED": #2196f3,
|
||||
"BUILDING": #ffc107,
|
||||
"BUILT": #4caf50,
|
||||
"FAILED": #f44336,
|
||||
"STARTING_ANALYSIS_SESSION": #2196f3,
|
||||
"RUNNING_ANALYSIS_SESSION": #4caf50,
|
||||
"CANCELING_ANALYSIS_SESSION": #ff5722
|
||||
),
|
||||
"job": (
|
||||
"INITIALIZING": #9e9e9e,
|
||||
"SUBMITTED": #9e9e9e,
|
||||
"QUEUED": #2196f3,
|
||||
"RUNNING": #ffc107,
|
||||
"CANCELING": #ff5722,
|
||||
"CANCELED": #ff5722,
|
||||
"COMPLETED": #4caf50,
|
||||
"FAILED": #f44336
|
||||
)
|
||||
),
|
||||
"s-attr": (
|
||||
"PERSON": #a6e22d,
|
||||
"PER": #a6e22d,
|
||||
"NORP": #ef60b4,
|
||||
"FACILITY": #43c6fc,
|
||||
"ORG": #43c6fc,
|
||||
"GPE": #fd9720,
|
||||
"LOC": #fd9720,
|
||||
"PRODUCT": #a99dfb,
|
||||
"MISC": #a99dfb,
|
||||
"EVENT": #fc0,
|
||||
"WORK_OF_ART": #fc0,
|
||||
"LANGUAGE": #fc0,
|
||||
"DATE": #2fbbab,
|
||||
"TIME": #2fbbab,
|
||||
"PERCENT": #bbb,
|
||||
"MONEY": #bbb,
|
||||
"QUANTITY": #bbb,
|
||||
"ORDINAL": #bbb,
|
||||
"CARDINAL": #bbb
|
||||
)
|
||||
);
|
||||
|
||||
@each $key, $color-code in map-get($color, "baseline") {
|
||||
.#{$key}-color {
|
||||
background-color: $color-code !important;
|
||||
}
|
||||
|
||||
.#{$key}-color-border {
|
||||
border-color: $color-code !important;
|
||||
}
|
||||
|
||||
.#{$key}-color-text {
|
||||
color: $color-code !important;
|
||||
}
|
||||
}
|
||||
|
||||
@each $key, $color-code in map-get($color, "social-area") {
|
||||
.social-area-color-#{$key} {
|
||||
background-color: $color-code !important;
|
||||
}
|
||||
|
||||
.social-area-color-border-#{$key} {
|
||||
border-color: $color-code !important;
|
||||
}
|
||||
}
|
||||
|
||||
@each $service-name, $color-palette in map-get($color, "service") {
|
||||
.service-color[data-service="#{$service-name}"] {
|
||||
background-color: map-get($color-palette, "base") !important;
|
||||
|
||||
&.darken {
|
||||
background-color: map-get($color-palette, "darken") !important;
|
||||
}
|
||||
|
||||
&.lighten {
|
||||
background-color: map-get($color-palette, "lighten") !important;
|
||||
}
|
||||
}
|
||||
|
||||
.service-color-border[data-service="#{$service-name}"] {
|
||||
border-color: map-get($color-palette, "base") !important;
|
||||
|
||||
&.border-darken {
|
||||
border-color: map-get($color-palette, "darken") !important;
|
||||
}
|
||||
|
||||
&.border-lighten {
|
||||
border-color: map-get($color-palette, "lighten") !important;
|
||||
}
|
||||
}
|
||||
|
||||
.service-color-text[data-service="#{$service-name}"] {
|
||||
color: map-get($color-palette, "base") !important;
|
||||
|
||||
&.text-darken {
|
||||
color: map-get($color-palette, "darken") !important;
|
||||
}
|
||||
|
||||
&.text-lighten {
|
||||
color: map-get($color-palette, "lighten") !important;
|
||||
}
|
||||
}
|
||||
|
||||
.service-scheme[data-service="#{$service-name}"] {
|
||||
background-color: map-get($color-palette, "lighten");
|
||||
|
||||
.btn, .btn-small, .btn-large, .btn-floating {
|
||||
background-color: map-get($color-palette, "darken");
|
||||
|
||||
&:hover {
|
||||
background-color: map-get($color-palette, "base");
|
||||
}
|
||||
}
|
||||
|
||||
.pagination {
|
||||
li.active {
|
||||
background-color: map-get($color-palette, "darken");
|
||||
}
|
||||
}
|
||||
|
||||
.table-of-contents {
|
||||
a.active, a:hover {
|
||||
border-color: map-get($color-palette, "darken");
|
||||
}
|
||||
}
|
||||
|
||||
.tabs {
|
||||
.tab {
|
||||
&.disabled {
|
||||
a {
|
||||
color: inherit;
|
||||
|
||||
&:hover {
|
||||
color: change-color(map-get($color-palette, "darken"), $alpha: 0.15);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
|
||||
&:focus, &:hover, &.active {
|
||||
color: map-get($color-palette, "darken");
|
||||
}
|
||||
|
||||
&:focus, &.active, &.active:focus {
|
||||
background-color: change-color(map-get($color-palette, "darken"), $alpha: 0.15);
|
||||
}
|
||||
}
|
||||
}
|
||||
.indicator {
|
||||
background-color: map-get($color-palette, "darken");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@each $ressource-name, $color-palette in map-get($color, "status") {
|
||||
@each $key, $color-code in $color-palette {
|
||||
.#{$ressource-name}-status-color[data-status="#{$key}"] {
|
||||
background-color: $color-code !important;
|
||||
}
|
||||
|
||||
.#{$ressource-name}-status-color-border[data-status="#{$key}"] {
|
||||
border-color: $color-code !important;
|
||||
}
|
||||
|
||||
.#{$ressource-name}-status-color-text[data-status="#{$key}"] {
|
||||
color: $color-code !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@each $key, $color-code in map-get($color, "s-attr") {
|
||||
.chip.s-attr[data-s-attr-type="ent"][data-s-attr-ent-type="#{$key}"] {
|
||||
background-color: $color-code !important;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
main {
|
||||
.btn, .btn-small, .btn-large, .btn-floating {
|
||||
background-color: map-deep-get($color, "baseline", "secondary");
|
||||
|
||||
&:hover {
|
||||
background-color: map-deep-get($color, "baseline", "secondary-variant");
|
||||
}
|
||||
}
|
||||
|
||||
.pagination {
|
||||
li.active {
|
||||
background-color: map-deep-get($color, "baseline", "secondary");
|
||||
}
|
||||
}
|
||||
|
||||
.table-of-contents {
|
||||
a.active, a:hover {
|
||||
border-color: map-deep-get($color, "baseline", "secondary");
|
||||
}
|
||||
}
|
||||
|
||||
.tabs {
|
||||
.tab {
|
||||
&.disabled {
|
||||
a {
|
||||
color: inherit;
|
||||
|
||||
&:hover {
|
||||
color: change-color(map-deep-get($color, "baseline", "secondary"), $alpha: 0.15);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
|
||||
&:focus, &:hover, &.active {
|
||||
color: map-deep-get($color, "baseline", "secondary");
|
||||
}
|
||||
|
||||
&:focus, &.active, &.active:focus {
|
||||
background-color: change-color(map-deep-get($color, "baseline", "secondary"), $alpha: 0.15);
|
||||
}
|
||||
}
|
||||
}
|
||||
.indicator {
|
||||
background-color: map-deep-get($color, "baseline", "secondary");
|
||||
}
|
||||
}
|
||||
}
|
@ -1,43 +0,0 @@
[data-corpus-status="UNPREPARED"] {
  --corpus-status-color: #9e9e9e;
}

[data-corpus-status="QUEUED"] {
  --corpus-status-color: #2196f3;
}

[data-corpus-status="BUILDING"] {
  --corpus-status-color: #ffc107;
}

[data-corpus-status="BUILT"] {
  --corpus-status-color: #4caf50;
}

[data-corpus-status="FAILED"] {
  --corpus-status-color: #f44336;
}

[data-corpus-status="STARTING_ANALYSIS_SESSION"] {
  --corpus-status-color: #2196f3;
}

[data-corpus-status="RUNNING_ANALYSIS_SESSION"] {
  --corpus-status-color: #4caf50;
}

[data-corpus-status="CANCELING_ANALYSIS_SESSION"] {
  --corpus-status-color: #ff5722;
}

.corpus-status-color {
  background-color: var(--corpus-status-color) !important;
}

.corpus-status-color-border {
  border-color: var(--corpus-status-color) !important;
}

.corpus-status-color-text {
  color: var(--corpus-status-color) !important;
}

@ -1,35 +0,0 @@
[data-corpus-status="UNPREPARED"] {
  --corpus-status-content: "unprepared";
}

[data-corpus-status="QUEUED"] {
  --corpus-status-content: "queued";
}

[data-corpus-status="BUILDING"] {
  --corpus-status-content: "building";
}

[data-corpus-status="BUILT"] {
  --corpus-status-content: "built";
}

[data-corpus-status="FAILED"] {
  --corpus-status-content: "failed";
}

[data-corpus-status="STARTING_ANALYSIS_SESSION"] {
  --corpus-status-content: "starting analysis session";
}

[data-corpus-status="RUNNING_ANALYSIS_SESSION"] {
  --corpus-status-content: "running analysis session";
}

[data-corpus-status="CANCELING_ANALYSIS_SESSION"] {
  --corpus-status-content: "canceling analysis session";
}

.corpus-status-text:empty::before {
  content: var(--corpus-status-content);
}

31
app/static/css/helpers.scss
Normal file
31
app/static/css/helpers.scss
Normal file
@ -0,0 +1,31 @@
/*
 * Spacing
 */
$spacing-shortcuts: ("margin": "mg", "padding": "pd");
$spacing-directions: ("top": "t", "right": "r", "bottom": "b", "left": "l");
$spacing-values: ("0": 0, "1": 0.25rem, "2": 0.5rem, "3": 0.75rem, "4": 1rem, "5": 1.5rem, "6": 3rem, "auto": auto);

@each $spacing-shortcut-name, $spacing-shortcut-value in $spacing-shortcuts {
  @each $spacing-name, $spacing-value in $spacing-values {
    // All directions
    .#{$spacing-shortcut-value}-#{$spacing-name} {
      #{$spacing-shortcut-name}: $spacing-value !important;
    }
    // Horizontal axis
    .#{$spacing-shortcut-value}x-#{$spacing-name} {
      #{$spacing-shortcut-name}-left: $spacing-value !important;
      #{$spacing-shortcut-name}-right: $spacing-value !important;
    }
    // Vertical axis
    .#{$spacing-shortcut-value}y-#{$spacing-name} {
      #{$spacing-shortcut-name}-top: $spacing-value !important;
      #{$spacing-shortcut-name}-bottom: $spacing-value !important;
    }
    // Cardinal directions
    @each $spacing-direction-name, $spacing-direction-value in $spacing-directions {
      .#{$spacing-shortcut-value}#{$spacing-direction-value}-#{$spacing-name} {
        #{$spacing-shortcut-name}-#{$spacing-direction-name}: $spacing-value !important;
      }
    }
  }
}
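
To make the naming scheme concrete: each shortcut/value pair above expands into one all-directions class, two axis classes, and four per-direction classes. For the "margin" ("mg") shortcut and the "3" (0.75rem) value, the loops compile to roughly the CSS below, so markup can combine classes such as "pdx-2 mgb-4" (0.5rem horizontal padding, 1rem bottom margin):

.mg-3 { margin: 0.75rem !important; }
.mgx-3 {
  margin-left: 0.75rem !important;
  margin-right: 0.75rem !important;
}
.mgy-3 {
  margin-top: 0.75rem !important;
  margin-bottom: 0.75rem !important;
}
.mgt-3 { margin-top: 0.75rem !important; }
.mgr-3 { margin-right: 0.75rem !important; }
.mgb-3 { margin-bottom: 0.75rem !important; }
.mgl-3 { margin-left: 0.75rem !important; }
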
@ -1,43 +0,0 @@
[data-job-status="INITIALIZING"] {
  --job-status-color: #9e9e9e;
}

[data-job-status="SUBMITTED"] {
  --job-status-color: #9e9e9e;
}

[data-job-status="QUEUED"] {
  --job-status-color: #2196f3;
}

[data-job-status="RUNNING"] {
  --job-status-color: #ffc107;
}

[data-job-status="CANCELING"] {
  --job-status-color: #ff5722;
}

[data-job-status="CANCELED"] {
  --job-status-color: #ff5722;
}

[data-job-status="COMPLETED"] {
  --job-status-color: #4caf50;
}

[data-job-status="FAILED"] {
  --job-status-color: #f44336;
}

.job-status-color {
  background-color: var(--job-status-color) !important;
}

.job-status-color-border {
  border-color: var(--job-status-color) !important;
}

.job-status-color-text {
  color: var(--job-status-color) !important;
}

@ -1,35 +0,0 @@
[data-job-status="INITIALIZING"] {
  --job-status-content: "initializing";
}

[data-job-status="SUBMITTED"] {
  --job-status-content: "submitted";
}

[data-job-status="QUEUED"] {
  --job-status-content: "queued";
}

[data-job-status="RUNNING"] {
  --job-status-content: "running";
}

[data-job-status="CANCELING"] {
  --job-status-content: "canceling";
}

[data-job-status="CANCELED"] {
  --job-status-content: "canceled";
}

[data-job-status="COMPLETED"] {
  --job-status-content: "completed";
}

[data-job-status="FAILED"] {
  --job-status-content: "failed";
}

.job-status-text:empty::before {
  content: var(--job-status-content);
}

8681
app/static/css/materialize.css
vendored
8681
app/static/css/materialize.css
vendored
File diff suppressed because it is too large
@ -1,63 +0,0 @@
/* #region sidenav-fixed */
/*
 * The sidenav-fixed class keeps the sidenav fixed and open on large screens
 * and falls back to the regular toggle behavior on smaller screens.
 * To prevent the sidenav from overlapping the content, the content (header, main and footer)
 * gets an offset equal to the width of the sidenav.
 *
 * Read more: https://materializecss.com/sidenav.html#variations
 */
body[data-sidenav-fixed="true" i] header,
body[data-sidenav-fixed="true" i] main,
body[data-sidenav-fixed="true" i] footer {
  padding-left: 300px;
}

@media only screen and (max-width : 992px) {
  body[data-sidenav-fixed="true" i] header,
  body[data-sidenav-fixed="true" i] main,
  body[data-sidenav-fixed="true" i] footer {
    padding-left: 0;
  }
}

body[data-sidenav-fixed="true" i] .navbar-fixed > nav {
  width: calc(100% - 300px);
}

@media only screen and (max-width : 992px) {
  body[data-sidenav-fixed="true" i] .navbar-fixed > nav {
    width: 100%;
  }
}
/* #endregion sidenav-fixed */

/* #region sticky-footer */
/*
 * Sticky Footer:
 * A sticky footer always stays on the bottom of the page regardless of how
 * little content is on the page. However, this footer will be pushed down if
 * there is a lot of content, so it is different from a fixed footer.
 *
 * Note: This may cause issues in Internet Explorer which has weak support for
 * flexbox.
 *
 * Read more: https://materializecss.com/footer.html#sticky-footer
 */
body[data-sticky-footer="true" i] {
  display: flex;
  min-height: 100vh;
  flex-direction: column;
}

body[data-sticky-footer="true" i] main {
  flex: 1 0 auto;
}
/* #endregion sticky-footer */

/* #region other-overrides */
::placeholder {
  color: #9e9e9e;
  opacity: 1;
}
/* #endregion other-overrides */

8
app/static/css/materialize/fixes.css
Normal file
8
app/static/css/materialize/fixes.css
Normal file
@ -0,0 +1,8 @@
.parallax-container .parallax {
  z-index: 0;
}

.autocomplete-content {
  width: 100% !important;
  left: 0 !important;
}

Some files were not shown because too many files have changed in this diff.