Compare commits
1 Commits
access-pip
...
05bccc7f88
Author | SHA1 | Date | |
---|---|---|---|
05bccc7f88 |
@ -8,6 +8,5 @@
|
||||
!.flaskenv
|
||||
!boot.sh
|
||||
!config.py
|
||||
!docker-nopaque-entrypoint.sh
|
||||
!nopaque.py
|
||||
!requirements.txt
|
||||
|
210
.env.tpl
@ -1,32 +1,204 @@
|
||||
##############################################################################
|
||||
# Variables for use in Docker Compose YAML files #
|
||||
##############################################################################
|
||||
################################################################################
|
||||
# Docker #
|
||||
################################################################################
|
||||
# DEFAULT: ./data
|
||||
# NOTE: Use `.` as <project-basedir>
|
||||
# HOST_DATA_DIR=
|
||||
|
||||
# Example: 1000
|
||||
# HINT: Use this bash command `id -u`
|
||||
# NOTE: 0 (= root user) is not allowed
|
||||
HOST_UID=
|
||||
|
||||
# Example: 1000
|
||||
# HINT: Use this bash command `id -g`
|
||||
HOST_GID=
|
||||
|
||||
# Example: 999
|
||||
# HINT: Use this bash command `getent group docker | cut -d: -f3`
|
||||
HOST_DOCKER_GID=
|
||||
|
||||
# DEFAULT: nopaque
|
||||
# DOCKER_DEFAULT_NETWORK_NAME=
|
||||
# DEFAULT: ./logs
|
||||
# NOTE: Use `.` as <project-basedir>
|
||||
# HOST_LOG_DIR=
|
||||
|
||||
# DEFAULT: ./volumes/db/data
|
||||
# NOTE: Use `.` as <project-basedir>
|
||||
# DOCKER_DB_SERVICE_DATA_VOLUME_SOURCE_PATH=
|
||||
# DEFAULT: nopaque_default
|
||||
# DOCKER_NETWORK_NAME=
|
||||
|
||||
# DEFAULT: ./volumes/mq/data
|
||||
# NOTE: Use `.` as <project-basedir>
|
||||
# DOCKER_MQ_SERVICE_DATA_VOLUME_SOURCE_PATH=
|
||||
################################################################################
|
||||
# Flask #
|
||||
# https://flask.palletsprojects.com/en/1.1.x/config/ #
|
||||
################################################################################
|
||||
# CHOOSE ONE: http, https
|
||||
# DEFAULT: http
|
||||
# PREFERRED_URL_SCHEME=
|
||||
|
||||
# NOTE: This must be a network share and it must be available on all
|
||||
# Docker Swarm nodes, mounted to the same path with the same
|
||||
# user and group ownership.
|
||||
DOCKER_NOPAQUE_SERVICE_DATA_VOLUME_SOURCE_PATH=
|
||||
# DEFAULT: hard to guess string
|
||||
# HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"`
|
||||
# SECRET_KEY=
|
||||
|
||||
# DEFAULT: ./volumes/nopaque/logs
|
||||
# NOTE: Use `.` as <project-basedir>
|
||||
# DOCKER_NOPAQUE_SERVICE_LOGS_VOLUME_SOURCE_PATH=.
|
||||
# DEFAULT: localhost:5000
|
||||
# Example: nopaque.example.com/nopaque.example.com:5000
|
||||
# HINT: If your instance is publicly available on a different port than 80/443,
|
||||
# you will have to add this to the server name
|
||||
# SERVER_NAME=
|
||||
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: False
|
||||
# HINT: Set to true if you redirect http to https
|
||||
# SESSION_COOKIE_SECURE=
|
||||
|
||||
|
||||
################################################################################
|
||||
# Flask-Assets #
|
||||
# https://webassets.readthedocs.io/en/latest/ #
|
||||
################################################################################
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: False
|
||||
# ASSETS_DEBUG=
|
||||
|
||||
|
||||
################################################################################
|
||||
# Flask-Hashids #
|
||||
# https://github.com/Pevtrick/Flask-Hashids #
|
||||
################################################################################
|
||||
# DEFAULT: 16
|
||||
# HASHIDS_MIN_LENGTH=
|
||||
|
||||
# HINT: Use this bash command `python -c "import uuid; print(uuid.uuid4().hex)"`
|
||||
# It is strongly recommended that this is NEVER the same as the SECRET_KEY
|
||||
HASHIDS_SALT=
|
||||
|
||||
|
||||
################################################################################
|
||||
# Flask-Login #
|
||||
# https://flask-login.readthedocs.io/en/latest/ #
|
||||
################################################################################
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: False
|
||||
# HINT: Set to true if you redirect http to https
|
||||
# REMEMBER_COOKIE_SECURE=
|
||||
|
||||
|
||||
################################################################################
|
||||
# Flask-Mail #
|
||||
# https://pythonhosted.org/Flask-Mail/ #
|
||||
################################################################################
|
||||
# EXAMPLE: nopaque Admin <nopaque@example.com>
|
||||
MAIL_DEFAULT_SENDER=
|
||||
|
||||
MAIL_PASSWORD=
|
||||
|
||||
# EXAMPLE: smtp.example.com
|
||||
MAIL_SERVER=
|
||||
|
||||
# EXAMPLE: 587
|
||||
MAIL_PORT=
|
||||
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: False
|
||||
# MAIL_USE_SSL=
|
||||
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: False
|
||||
# MAIL_USE_TLS=
|
||||
|
||||
# EXAMPLE: nopaque@example.com
|
||||
MAIL_USERNAME=
|
||||
|
||||
|
||||
################################################################################
|
||||
# Flask-SQLAlchemy #
|
||||
# https://flask-sqlalchemy.palletsprojects.com/en/2.x/config/ #
|
||||
################################################################################
|
||||
# DEFAULT: 'sqlite:///<nopaque-basedir>/data.sqlite'
|
||||
# NOTE: Use `.` as <nopaque-basedir>,
|
||||
# Don't use a SQLite database when using Docker
|
||||
# SQLALCHEMY_DATABASE_URI=
|
||||
|
||||
|
||||
################################################################################
|
||||
# nopaque #
|
||||
################################################################################
|
||||
# An account registered with this email address gets automatically assigned
|
||||
# the administrator role.
|
||||
# EXAMPLE: admin.nopaque@example.com
|
||||
NOPAQUE_ADMIN=
|
||||
|
||||
# DEFAULT: /mnt/nopaque
|
||||
# NOTE: This must be a network share and it must be available on all Docker
|
||||
# Swarm nodes
|
||||
# NOPAQUE_DATA_DIR=
|
||||
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: True
|
||||
# NOPAQUE_IS_PRIMARY_INSTANCE=
|
||||
|
||||
# transport://[userid:password]@hostname[:port]/[virtual_host]
|
||||
NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI=
|
||||
|
||||
# NOTE: Get these from the nopaque development team
|
||||
NOPAQUE_DOCKER_REGISTRY_USERNAME=
|
||||
NOPAQUE_DOCKER_REGISTRY_PASSWORD=
|
||||
|
||||
# DEFAULT: %Y-%m-%d %H:%M:%S
|
||||
# NOPAQUE_LOG_DATE_FORMAT=
|
||||
|
||||
# DEFAULT: [%(asctime)s] %(levelname)s in %(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s
|
||||
# NOPAQUE_LOG_FORMAT=
|
||||
|
||||
# DEFAULT: INFO
|
||||
# CHOOSE ONE: CRITICAL, ERROR, WARNING, INFO, DEBUG
|
||||
# NOPAQUE_LOG_LEVEL=
|
||||
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: True
|
||||
# NOPAQUE_LOG_FILE_ENABLED=
|
||||
|
||||
# DEFAULT: <nopaque-basedir>/logs
|
||||
# NOTE: Use `.` as <nopaque-basedir>
|
||||
# NOPAQUE_LOG_FILE_DIR=
|
||||
|
||||
# DEFAULT: NOPAQUE_LOG_LEVEL
|
||||
# CHOOSE ONE: CRITICAL, ERROR, WARNING, INFO, DEBUG
|
||||
# NOPAQUE_LOG_FILE_LEVEL=
|
||||
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: False
|
||||
# NOPAQUE_LOG_STDERR_ENABLED=
|
||||
|
||||
# CHOOSE ONE: CRITICAL, ERROR, WARNING, INFO, DEBUG
|
||||
# DEFAULT: NOPAQUE_LOG_LEVEL
|
||||
# NOPAQUE_LOG_STDERR_LEVEL=
|
||||
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: False
|
||||
# HINT: Set this to True only if you are using a proxy in front of nopaque
|
||||
# NOPAQUE_PROXY_FIX_ENABLED=
|
||||
|
||||
# DEFAULT: 0
|
||||
# Number of values to trust for X-Forwarded-For
|
||||
# NOPAQUE_PROXY_FIX_X_FOR=
|
||||
|
||||
# DEFAULT: 0
|
||||
# Number of values to trust for X-Forwarded-Host
|
||||
# NOPAQUE_PROXY_FIX_X_HOST=
|
||||
|
||||
# DEFAULT: 0
|
||||
# Number of values to trust for X-Forwarded-Port
|
||||
# NOPAQUE_PROXY_FIX_X_PORT=
|
||||
|
||||
# DEFAULT: 0
|
||||
# Number of values to trust for X-Forwarded-Prefix
|
||||
# NOPAQUE_PROXY_FIX_X_PREFIX=
|
||||
|
||||
# DEFAULT: 0
|
||||
# Number of values to trust for X-Forwarded-Proto
|
||||
# NOPAQUE_PROXY_FIX_X_PROTO=
|
||||
|
||||
# CHOOSE ONE: False, True
|
||||
# DEFAULT: False
|
||||
# NOPAQUE_TRANSKRIBUS_ENABLED=
|
||||
|
||||
# READ-COOP account data: https://readcoop.eu/
|
||||
# NOPAQUE_READCOOP_USERNAME=
|
||||
# NOPAQUE_READCOOP_PASSWORD=
|
||||
|
2
.gitignore
vendored
@ -1,6 +1,6 @@
|
||||
# nopaque specifics
|
||||
app/static/gen/
|
||||
volumes/
|
||||
data/
|
||||
docker-compose.override.yml
|
||||
logs/
|
||||
!logs/dummy
|
||||
|
@ -1,84 +0,0 @@
|
||||
include:
|
||||
- template: Security/Container-Scanning.gitlab-ci.yml
|
||||
|
||||
##############################################################################
|
||||
# Pipeline stages in order of execution #
|
||||
##############################################################################
|
||||
stages:
|
||||
- build
|
||||
- publish
|
||||
- sca
|
||||
|
||||
##############################################################################
|
||||
# Pipeline behavior #
|
||||
##############################################################################
|
||||
workflow:
|
||||
rules:
|
||||
# Run the pipeline on commits to the default branch
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
variables:
|
||||
# Set the Docker image tag to `latest`
|
||||
DOCKER_IMAGE: $CI_REGISTRY_IMAGE:latest
|
||||
when: always
|
||||
# Run the pipeline on tag creation
|
||||
- if: $CI_COMMIT_TAG
|
||||
variables:
|
||||
# Set the Docker image tag to the Git tag name
|
||||
DOCKER_IMAGE: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME
|
||||
when: always
|
||||
# Don't run the pipeline on all other occasions
|
||||
- when: never
|
||||
|
||||
##############################################################################
|
||||
# Default values for pipeline jobs #
|
||||
##############################################################################
|
||||
default:
|
||||
image: docker:24.0.6
|
||||
services:
|
||||
- docker:24.0.6-dind
|
||||
tags:
|
||||
- docker
|
||||
|
||||
##############################################################################
|
||||
# CI/CD variables for all jobs in the pipeline #
|
||||
##############################################################################
|
||||
variables:
|
||||
DOCKER_TLS_CERTDIR: /certs
|
||||
DOCKER_BUILD_PATH: .
|
||||
DOCKERFILE: Dockerfile
|
||||
|
||||
##############################################################################
|
||||
# Pipeline jobs #
|
||||
##############################################################################
|
||||
build:
|
||||
stage: build
|
||||
script:
|
||||
- docker build --tag $DOCKER_IMAGE --file $DOCKERFILE $DOCKER_BUILD_PATH
|
||||
- docker save $DOCKER_IMAGE > docker_image.tar
|
||||
artifacts:
|
||||
paths:
|
||||
- docker_image.tar
|
||||
|
||||
publish:
|
||||
stage: publish
|
||||
before_script:
|
||||
- docker login --username gitlab-ci-token --password $CI_JOB_TOKEN $CI_REGISTRY
|
||||
script:
|
||||
- docker load --input docker_image.tar
|
||||
- docker push $DOCKER_IMAGE
|
||||
after_script:
|
||||
- docker logout $CI_REGISTRY
|
||||
|
||||
container_scanning:
|
||||
stage: sca
|
||||
rules:
|
||||
# Run the job on commits to the default branch
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
when: always
|
||||
# Run the job on tag creation
|
||||
- if: $CI_COMMIT_TAG
|
||||
when: always
|
||||
# Don't run the job on all other occasions
|
||||
- when: never
|
||||
variables:
|
||||
CS_IMAGE: $DOCKER_IMAGE
|
5
.vscode/extensions.json
vendored
@ -1,8 +1,7 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"irongeek.vscode-env",
|
||||
"samuelcolvin.jinjahtml",
|
||||
"ms-azuretools.vscode-docker",
|
||||
"ms-python.python",
|
||||
"samuelcolvin.jinjahtml"
|
||||
"ms-python.python"
|
||||
]
|
||||
}
|
||||
|
6
.vscode/settings.json
vendored
@ -1,9 +1,13 @@
|
||||
{
|
||||
"editor.rulers": [79],
|
||||
"files.insertFinalNewline": true,
|
||||
"python.terminal.activateEnvironment": false,
|
||||
"[css]": {
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[scss]": {
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[html]": {
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
@ -13,7 +17,7 @@
|
||||
"[jinja-html]": {
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[scss]": {
|
||||
"[jinja-js]": {
|
||||
"editor.tabSize": 2
|
||||
}
|
||||
}
|
||||
|
40
Dockerfile
@ -1,60 +1,50 @@
|
||||
FROM python:3.10.13-slim-bookworm
|
||||
FROM python:3.8.10-slim-buster
|
||||
|
||||
|
||||
LABEL authors="Patrick Jentsch <p.jentsch@uni-bielefeld.de>"
|
||||
|
||||
|
||||
# Set environment variables
|
||||
ARG DOCKER_GID
|
||||
ARG UID
|
||||
ARG GID
|
||||
|
||||
|
||||
ENV LANG="C.UTF-8"
|
||||
ENV PYTHONDONTWRITEBYTECODE="1"
|
||||
ENV PYTHONUNBUFFERED="1"
|
||||
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update \
|
||||
&& apt-get install --no-install-recommends --yes \
|
||||
build-essential \
|
||||
gosu \
|
||||
libpq-dev \
|
||||
&& rm --recursive /var/lib/apt/lists/*
|
||||
|
||||
|
||||
# Create a non-root user
|
||||
RUN useradd --create-home --no-log-init nopaque \
|
||||
&& groupadd docker \
|
||||
&& usermod --append --groups docker nopaque
|
||||
|
||||
RUN groupadd --gid "${DOCKER_GID}" docker \
|
||||
&& groupadd --gid "${GID}" nopaque \
|
||||
&& useradd --create-home --gid nopaque --groups "${DOCKER_GID}" --no-log-init --uid "${UID}" nopaque
|
||||
USER nopaque
|
||||
WORKDIR /home/nopaque
|
||||
|
||||
|
||||
# Create a Python virtual environment
|
||||
ENV NOPAQUE_PYTHON3_VENV_PATH="/home/nopaque/.venv"
|
||||
RUN python3 -m venv "${NOPAQUE_PYTHON3_VENV_PATH}"
|
||||
ENV PATH="${NOPAQUE_PYTHON3_VENV_PATH}/bin:${PATH}"
|
||||
ENV PYTHON3_VENV_PATH="/home/nopaque/venv"
|
||||
RUN python3 -m venv "${PYTHON3_VENV_PATH}"
|
||||
ENV PATH="${PYTHON3_VENV_PATH}/bin:${PATH}"
|
||||
|
||||
|
||||
# Install Python dependencies
|
||||
COPY --chown=nopaque:nopaque requirements.txt requirements.txt
|
||||
COPY --chown=nopaque:nopaque requirements.txt .
|
||||
RUN python3 -m pip install --requirement requirements.txt \
|
||||
&& rm requirements.txt
|
||||
|
||||
|
||||
# Install the application
|
||||
COPY docker-nopaque-entrypoint.sh /usr/local/bin/
|
||||
|
||||
COPY --chown=nopaque:nopaque app app
|
||||
COPY --chown=nopaque:nopaque migrations migrations
|
||||
COPY --chown=nopaque:nopaque tests tests
|
||||
COPY --chown=nopaque:nopaque .flaskenv boot.sh config.py nopaque.py requirements.txt ./
|
||||
|
||||
RUN mkdir logs
|
||||
COPY --chown=nopaque:nopaque .flaskenv boot.sh config.py nopaque.py ./
|
||||
|
||||
|
||||
EXPOSE 5000
|
||||
|
||||
|
||||
USER root
|
||||
|
||||
|
||||
ENTRYPOINT ["docker-nopaque-entrypoint.sh"]
|
||||
ENTRYPOINT ["./boot.sh"]
|
||||
|
@ -1,8 +1,5 @@
|
||||
# nopaque
|
||||
|
||||

|
||||

|
||||
|
||||
nopaque bundles various tools and services that provide humanities scholars with DH methods and thus can support their various individual research processes. Using nopaque, researchers can subject digitized sources to Optical Character Recognition (OCR). The resulting text files can then be used as a data basis for Natural Language Processing (NLP). The texts are automatically subjected to various linguistic annotations. The data processed via NLP can then be summarized in the web application as corpora and analyzed by means of an information retrieval system through complex search queries. The range of functions of the web application will be successively extended according to the needs of the researchers.
|
||||
|
||||
## Prerequisites and requirements
|
||||
|
@ -8,7 +8,7 @@
|
||||
pipeline_name: 'ca_core_news_md'
|
||||
version: '3.2.0'
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.0'
|
||||
- title: 'German'
|
||||
description: 'German pipeline optimized for CPU. Components: tok2vec, tagger, morphologizer, parser, senter, ner, attribute_ruler, lemmatizer.'
|
||||
url: 'https://github.com/explosion/spacy-models/releases/download/de_core_news_md-3.2.0/de_core_news_md-3.2.0.tar.gz'
|
||||
@ -19,7 +19,7 @@
|
||||
pipeline_name: 'de_core_news_md'
|
||||
version: '3.2.0'
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.0'
|
||||
- title: 'Greek'
|
||||
description: 'Greek pipeline optimized for CPU. Components: tok2vec, morphologizer, parser, senter, ner, attribute_ruler, lemmatizer.'
|
||||
url: 'https://github.com/explosion/spacy-models/releases/download/el_core_news_md-3.2.0/el_core_news_md-3.2.0.tar.gz'
|
||||
@ -180,7 +180,7 @@
|
||||
version: '3.4.0'
|
||||
compatible_service_versions:
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
- '0.1.2'
|
||||
- title: 'Italian'
|
||||
description: 'Italian pipeline optimized for CPU. Components: tok2vec, morphologizer, tagger, parser, lemmatizer (trainable_lemmatizer), senter, ner'
|
||||
url: 'https://github.com/explosion/spacy-models/releases/download/it_core_news_md-3.4.0/it_core_news_md-3.4.0.tar.gz'
|
@ -9,7 +9,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Amharic'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/amh.traineddata'
|
||||
@ -21,7 +20,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Arabic'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ara.traineddata'
|
||||
@ -33,7 +31,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Assamese'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/asm.traineddata'
|
||||
@ -45,7 +42,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Azerbaijani'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/aze.traineddata'
|
||||
@ -57,7 +53,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Azerbaijani - Cyrillic'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/aze_cyrl.traineddata'
|
||||
@ -69,7 +64,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Belarusian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/bel.traineddata'
|
||||
@ -81,7 +75,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Bengali'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ben.traineddata'
|
||||
@ -93,7 +86,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Tibetan'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/bod.traineddata'
|
||||
@ -105,7 +97,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Bosnian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/bos.traineddata'
|
||||
@ -117,7 +108,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Bulgarian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/bul.traineddata'
|
||||
@ -129,7 +119,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Catalan; Valencian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/cat.traineddata'
|
||||
@ -141,7 +130,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Cebuano'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ceb.traineddata'
|
||||
@ -153,7 +141,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Czech'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ces.traineddata'
|
||||
@ -165,7 +152,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Chinese - Simplified'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/chi_sim.traineddata'
|
||||
@ -177,7 +163,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Chinese - Traditional'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/chi_tra.traineddata'
|
||||
@ -189,7 +174,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Cherokee'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/chr.traineddata'
|
||||
@ -201,7 +185,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Welsh'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/cym.traineddata'
|
||||
@ -213,7 +196,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Danish'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/dan.traineddata'
|
||||
@ -225,7 +207,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
- title: 'German'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/deu.traineddata'
|
||||
@ -237,7 +218,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Dzongkha'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/dzo.traineddata'
|
||||
@ -249,7 +229,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Greek, Modern (1453-)'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ell.traineddata'
|
||||
@ -261,7 +240,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
- title: 'English'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/eng.traineddata'
|
||||
@ -273,7 +251,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
- title: 'English, Middle (1100-1500)'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/enm.traineddata'
|
||||
@ -285,7 +262,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Esperanto'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/epo.traineddata'
|
||||
@ -297,7 +273,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Estonian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/est.traineddata'
|
||||
@ -309,7 +284,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Basque'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/eus.traineddata'
|
||||
@ -321,7 +295,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Persian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/fas.traineddata'
|
||||
@ -333,7 +306,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Finnish'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/fin.traineddata'
|
||||
@ -345,7 +317,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'French'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/fra.traineddata'
|
||||
@ -357,7 +328,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
- title: 'German Fraktur'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/frk.traineddata'
|
||||
@ -369,7 +339,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
- title: 'French, Middle (ca. 1400-1600)'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/frm.traineddata'
|
||||
@ -381,7 +350,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Irish'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/gle.traineddata'
|
||||
@ -393,7 +361,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Galician'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/glg.traineddata'
|
||||
@ -405,7 +372,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Greek, Ancient (-1453)'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/grc.traineddata'
|
||||
@ -417,7 +383,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Gujarati'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/guj.traineddata'
|
||||
@ -429,7 +394,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Haitian; Haitian Creole'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/hat.traineddata'
|
||||
@ -441,7 +405,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Hebrew'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/heb.traineddata'
|
||||
@ -453,7 +416,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Hindi'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/hin.traineddata'
|
||||
@ -465,7 +427,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Croatian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/hrv.traineddata'
|
||||
@ -477,7 +438,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Hungarian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/hun.traineddata'
|
||||
@ -489,7 +449,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Inuktitut'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/iku.traineddata'
|
||||
@ -501,7 +460,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Indonesian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ind.traineddata'
|
||||
@ -513,7 +471,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Icelandic'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/isl.traineddata'
|
||||
@ -525,7 +482,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Italian'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ita.traineddata'
|
||||
@ -537,7 +493,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
- title: 'Italian - Old'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ita_old.traineddata'
|
||||
@ -549,7 +504,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Javanese'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/jav.traineddata'
|
||||
@ -561,7 +515,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Japanese'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/jpn.traineddata'
|
||||
@ -573,7 +526,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Kannada'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/kan.traineddata'
|
||||
@ -585,7 +537,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Georgian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/kat.traineddata'
|
||||
@ -597,7 +548,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Georgian - Old'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/kat_old.traineddata'
|
||||
@ -609,7 +559,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Kazakh'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/kaz.traineddata'
|
||||
@ -621,7 +570,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Central Khmer'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/khm.traineddata'
|
||||
@ -633,7 +581,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Kirghiz; Kyrgyz'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/kir.traineddata'
|
||||
@ -645,7 +592,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Korean'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/kor.traineddata'
|
||||
@ -657,7 +603,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Kurdish'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/kur.traineddata'
|
||||
@ -669,7 +614,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Lao'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/lao.traineddata'
|
||||
@ -681,7 +625,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Latin'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/lat.traineddata'
|
||||
@ -693,7 +636,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Latvian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/lav.traineddata'
|
||||
@ -705,7 +647,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Lithuanian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/lit.traineddata'
|
||||
@ -717,7 +658,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Malayalam'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/mal.traineddata'
|
||||
@ -729,7 +669,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Marathi'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/mar.traineddata'
|
||||
@ -741,7 +680,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Macedonian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/mkd.traineddata'
|
||||
@ -753,7 +691,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Maltese'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/mlt.traineddata'
|
||||
@ -765,7 +702,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Malay'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/msa.traineddata'
|
||||
@ -777,7 +713,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Burmese'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/mya.traineddata'
|
||||
@ -789,7 +724,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Nepali'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/nep.traineddata'
|
||||
@ -801,7 +735,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Dutch; Flemish'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/nld.traineddata'
|
||||
@ -813,7 +746,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Norwegian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/nor.traineddata'
|
||||
@ -825,7 +757,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Oriya'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ori.traineddata'
|
||||
@ -837,7 +768,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Panjabi; Punjabi'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/pan.traineddata'
|
||||
@ -849,7 +779,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Polish'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/pol.traineddata'
|
||||
@ -861,7 +790,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Portuguese'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/por.traineddata'
|
||||
@ -873,7 +801,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Pushto; Pashto'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/pus.traineddata'
|
||||
@ -885,7 +812,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Romanian; Moldavian; Moldovan'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ron.traineddata'
|
||||
@ -897,7 +823,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Russian'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/rus.traineddata'
|
||||
@ -909,7 +834,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Sanskrit'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/san.traineddata'
|
||||
@ -921,7 +845,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Sinhala; Sinhalese'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/sin.traineddata'
|
||||
@ -933,7 +856,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Slovak'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/slk.traineddata'
|
||||
@ -945,7 +867,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Slovenian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/slv.traineddata'
|
||||
@ -957,7 +878,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
- title: 'Spanish; Castilian'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/spa.traineddata'
|
||||
@ -969,7 +889,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
- title: 'Spanish; Castilian - Old'
|
||||
description: ''
|
||||
url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/spa_old.traineddata'
|
||||
@ -981,7 +900,6 @@
|
||||
compatible_service_versions:
|
||||
- '0.1.0'
|
||||
- '0.1.1'
|
||||
- '0.1.2'
|
||||
# - title: 'Albanian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/sqi.traineddata'
|
||||
@ -993,7 +911,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Serbian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/srp.traineddata'
|
||||
@ -1005,7 +922,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Serbian - Latin'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/srp_latn.traineddata'
|
||||
@ -1017,7 +933,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Swahili'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/swa.traineddata'
|
||||
@ -1029,7 +944,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Swedish'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/swe.traineddata'
|
||||
@ -1041,7 +955,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Syriac'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/syr.traineddata'
|
||||
@ -1053,7 +966,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Tamil'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/tam.traineddata'
|
||||
@ -1065,7 +977,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Telugu'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/tel.traineddata'
|
||||
@ -1077,7 +988,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Tajik'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/tgk.traineddata'
|
||||
@ -1089,7 +999,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Tagalog'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/tgl.traineddata'
|
||||
@ -1101,7 +1010,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Thai'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/tha.traineddata'
|
||||
@ -1113,7 +1021,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Tigrinya'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/tir.traineddata'
|
||||
@ -1125,7 +1032,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Turkish'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/tur.traineddata'
|
||||
@ -1137,7 +1043,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Uighur; Uyghur'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/uig.traineddata'
|
||||
@ -1149,7 +1054,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Ukrainian'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/ukr.traineddata'
|
||||
@ -1161,7 +1065,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Urdu'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/urd.traineddata'
|
||||
@ -1173,7 +1076,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Uzbek'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/uzb.traineddata'
|
||||
@ -1185,7 +1087,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Uzbek - Cyrillic'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/uzb_cyrl.traineddata'
|
||||
@ -1197,7 +1098,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Vietnamese'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/vie.traineddata'
|
||||
@ -1209,7 +1109,6 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
||||
# - title: 'Yiddish'
|
||||
# description: ''
|
||||
# url: 'https://github.com/tesseract-ocr/tessdata/raw/4.1.0/yid.traineddata'
|
||||
@ -1221,4 +1120,3 @@
|
||||
# compatible_service_versions:
|
||||
# - '0.1.0'
|
||||
# - '0.1.1'
|
||||
# - '0.1.2'
|
@ -13,6 +13,7 @@ from flask_paranoid import Paranoid
|
||||
from flask_socketio import SocketIO
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from flask_hashids import Hashids
|
||||
from werkzeug.exceptions import HTTPException
|
||||
|
||||
|
||||
apifairy = APIFairy()
|
||||
@ -57,9 +58,6 @@ def create_app(config: Config = Config) -> Flask:
|
||||
scheduler.init_app(app)
|
||||
socketio.init_app(app, message_queue=app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI']) # noqa
|
||||
|
||||
from .models.event_listeners import register_event_listeners
|
||||
register_event_listeners()
|
||||
|
||||
from .admin import bp as admin_blueprint
|
||||
default_breadcrumb_root(admin_blueprint, '.admin')
|
||||
app.register_blueprint(admin_blueprint, url_prefix='/admin')
|
||||
@ -102,7 +100,7 @@ def create_app(config: Config = Config) -> Flask:
|
||||
|
||||
from .users import bp as users_blueprint
|
||||
default_breadcrumb_root(users_blueprint, '.users')
|
||||
app.register_blueprint(users_blueprint, cli_group='user', url_prefix='/users')
|
||||
app.register_blueprint(users_blueprint, url_prefix='/users')
|
||||
|
||||
from .workshops import bp as workshops_blueprint
|
||||
app.register_blueprint(workshops_blueprint, url_prefix='/workshops')
|
||||
|
@ -16,8 +16,8 @@ class CreateSpaCyNLPPipelineModelForm(ContributionBaseForm):
|
||||
)
|
||||
|
||||
def validate_spacy_model_file(self, field):
|
||||
if not field.data.filename.lower().endswith(('.tar.gz', ('.whl'))):
|
||||
raise ValidationError('.tar.gz or .whl files only!')
|
||||
if not field.data.filename.lower().endswith('.tar.gz'):
|
||||
raise ValidationError('.tar.gz files only!')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if 'prefix' not in kwargs:
|
||||
|
@ -2,69 +2,80 @@ from flask import current_app
|
||||
from app import db
|
||||
from app.models import User, Corpus, CorpusFile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, List
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
|
||||
|
||||
class SandpaperConverter:
|
||||
def __init__(self, json_db_file: Path, data_dir: Path):
|
||||
def __init__(self, json_db_file, data_dir):
|
||||
self.json_db_file = json_db_file
|
||||
self.data_dir = data_dir
|
||||
|
||||
def run(self):
|
||||
with self.json_db_file.open('r') as f:
|
||||
json_db: List[Dict] = json.load(f)
|
||||
with open(self.json_db_file, 'r') as f:
|
||||
json_db = json.loads(f.read())
|
||||
|
||||
for json_user in json_db:
|
||||
if not json_user['confirmed']:
|
||||
current_app.logger.info(f'Skip unconfirmed user {json_user["username"]}')
|
||||
continue
|
||||
user_dir = self.data_dir / f'{json_user["id"]}'
|
||||
user_dir = os.path.join(self.data_dir, str(json_user['id']))
|
||||
self.convert_user(json_user, user_dir)
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def convert_user(self, json_user: Dict, user_dir: Path):
|
||||
def convert_user(self, json_user, user_dir):
|
||||
current_app.logger.info(f'Create User {json_user["username"]}...')
|
||||
user = User(
|
||||
confirmed=json_user['confirmed'],
|
||||
email=json_user['email'],
|
||||
last_seen=datetime.fromtimestamp(json_user['last_seen']),
|
||||
member_since=datetime.fromtimestamp(json_user['member_since']),
|
||||
password_hash=json_user['password_hash'], # TODO: Needs to be added manually
|
||||
username=json_user['username']
|
||||
)
|
||||
db.session.add(user)
|
||||
db.session.flush(objects=[user])
|
||||
db.session.refresh(user)
|
||||
try:
|
||||
user = User.create(
|
||||
confirmed=json_user['confirmed'],
|
||||
email=json_user['email'],
|
||||
last_seen=datetime.fromtimestamp(json_user['last_seen']),
|
||||
member_since=datetime.fromtimestamp(json_user['member_since']),
|
||||
password_hash=json_user['password_hash'], # TODO: Needs to be added manually
|
||||
username=json_user['username']
|
||||
)
|
||||
except OSError:
|
||||
user.makedirs()
|
||||
except OSError as e:
|
||||
current_app.logger.error(e)
|
||||
db.session.rollback()
|
||||
raise Exception('Internal Server Error')
|
||||
for json_corpus in json_user['corpora'].values():
|
||||
if not json_corpus['files'].values():
|
||||
current_app.logger.info(f'Skip empty corpus {json_corpus["title"]}')
|
||||
continue
|
||||
corpus_dir = user_dir / 'corpora' / f'{json_corpus["id"]}'
|
||||
corpus_dir = os.path.join(user_dir, 'corpora', str(json_corpus['id']))
|
||||
self.convert_corpus(json_corpus, user, corpus_dir)
|
||||
current_app.logger.info('Done')
|
||||
|
||||
|
||||
def convert_corpus(self, json_corpus: Dict, user: User, corpus_dir: Path):
|
||||
def convert_corpus(self, json_corpus, user, corpus_dir):
|
||||
current_app.logger.info(f'Create Corpus {json_corpus["title"]}...')
|
||||
corpus = Corpus(
|
||||
user=user,
|
||||
creation_date=datetime.fromtimestamp(json_corpus['creation_date']),
|
||||
description=json_corpus['description'],
|
||||
title=json_corpus['title']
|
||||
)
|
||||
db.session.add(corpus)
|
||||
db.session.flush(objects=[corpus])
|
||||
db.session.refresh(corpus)
|
||||
try:
|
||||
corpus = Corpus.create(
|
||||
user=user,
|
||||
creation_date=datetime.fromtimestamp(json_corpus['creation_date']),
|
||||
description=json_corpus['description'],
|
||||
title=json_corpus['title']
|
||||
)
|
||||
except OSError:
|
||||
corpus.makedirs()
|
||||
except OSError as e:
|
||||
current_app.logger.error(e)
|
||||
db.session.rollback()
|
||||
raise Exception('Internal Server Error')
|
||||
for json_corpus_file in json_corpus['files'].values():
|
||||
self.convert_corpus_file(json_corpus_file, corpus, corpus_dir)
|
||||
current_app.logger.info('Done')
|
||||
|
||||
|
||||
def convert_corpus_file(self, json_corpus_file: Dict, corpus: Corpus, corpus_dir: Path):
|
||||
def convert_corpus_file(self, json_corpus_file, corpus, corpus_dir):
|
||||
current_app.logger.info(f'Create CorpusFile {json_corpus_file["title"]}...')
|
||||
corpus_file = CorpusFile(
|
||||
corpus=corpus,
|
||||
@ -88,13 +99,13 @@ class SandpaperConverter:
|
||||
db.session.refresh(corpus_file)
|
||||
try:
|
||||
shutil.copy2(
|
||||
corpus_dir / json_corpus_file['filename'],
|
||||
os.path.join(corpus_dir, json_corpus_file['filename']),
|
||||
corpus_file.path
|
||||
)
|
||||
except:
|
||||
current_app.logger.warning(
|
||||
'Can not convert corpus file: '
|
||||
f'{corpus_dir / json_corpus_file["filename"]}'
|
||||
f'{os.path.join(corpus_dir, json_corpus_file["filename"])}'
|
||||
' -> '
|
||||
f'{corpus_file.path}'
|
||||
)
|
||||
|
@ -1,7 +1,7 @@
|
||||
from flask import current_app
|
||||
from app.models import Corpus, CorpusStatus
|
||||
import os
|
||||
import shutil
|
||||
from app import db
|
||||
from app.models import Corpus, CorpusStatus
|
||||
from . import bp
|
||||
|
||||
|
||||
@ -18,17 +18,10 @@ def reset():
|
||||
]
|
||||
for corpus in [x for x in Corpus.query.all() if x.status in status]:
|
||||
print(f'Resetting corpus {corpus}')
|
||||
corpus_cwb_dir = corpus.path / 'cwb'
|
||||
corpus_cwb_data_dir = corpus_cwb_dir / 'data'
|
||||
corpus_cwb_registry_dir = corpus_cwb_dir / 'registry'
|
||||
try:
|
||||
shutil.rmtree(corpus.path / 'cwb', ignore_errors=True)
|
||||
corpus_cwb_dir.mkdir()
|
||||
corpus_cwb_data_dir.mkdir()
|
||||
corpus_cwb_registry_dir.mkdir()
|
||||
except OSError as e:
|
||||
current_app.logger.error(e)
|
||||
raise
|
||||
shutil.rmtree(os.path.join(corpus.path, 'cwb'), ignore_errors=True)
|
||||
os.mkdir(os.path.join(corpus.path, 'cwb'))
|
||||
os.mkdir(os.path.join(corpus.path, 'cwb', 'data'))
|
||||
os.mkdir(os.path.join(corpus.path, 'cwb', 'registry'))
|
||||
corpus.status = CorpusStatus.UNPREPARED
|
||||
corpus.num_analysis_sessions = 0
|
||||
db.session.commit()
|
||||
|
@ -1,14 +1,13 @@
|
||||
from cqi import CQiClient
|
||||
from cqi.errors import CQiException
|
||||
from cqi.status import CQiStatus
|
||||
from docker.models.containers import Container
|
||||
from flask import current_app, session
|
||||
from flask import session
|
||||
from flask_login import current_user
|
||||
from flask_socketio import Namespace
|
||||
from inspect import signature
|
||||
from threading import Lock
|
||||
from typing import Callable, Dict, List, Optional
|
||||
from app import db, docker_client, hashids, socketio
|
||||
from typing import Callable, Dict, List
|
||||
from app import db, hashids, socketio
|
||||
from app.decorators import socketio_login_required
|
||||
from app.models import Corpus, CorpusStatus
|
||||
from . import extensions
|
||||
@ -93,8 +92,8 @@ class CQiNamespace(Namespace):
|
||||
|
||||
@socketio_login_required
|
||||
def on_init(self, db_corpus_hashid: str):
|
||||
db_corpus_id: int = hashids.decode(db_corpus_hashid)
|
||||
db_corpus: Optional[Corpus] = Corpus.query.get(db_corpus_id)
|
||||
db_corpus_id = hashids.decode(db_corpus_hashid)
|
||||
db_corpus = Corpus.query.get(db_corpus_id)
|
||||
if db_corpus is None:
|
||||
return {'code': 404, 'msg': 'Not Found'}
|
||||
if not (db_corpus.user == current_user
|
||||
@ -113,7 +112,7 @@ class CQiNamespace(Namespace):
|
||||
db.session.commit()
|
||||
db_corpus.num_analysis_sessions = Corpus.num_analysis_sessions + 1
|
||||
db.session.commit()
|
||||
retry_counter: int = 20
|
||||
retry_counter = 20
|
||||
while db_corpus.status != CorpusStatus.RUNNING_ANALYSIS_SESSION:
|
||||
if retry_counter == 0:
|
||||
db_corpus.num_analysis_sessions = Corpus.num_analysis_sessions - 1
|
||||
@ -122,16 +121,11 @@ class CQiNamespace(Namespace):
|
||||
socketio.sleep(3)
|
||||
retry_counter -= 1
|
||||
db.session.refresh(db_corpus)
|
||||
# cqi_client: CQiClient = CQiClient(f'cqpserver_{db_corpus_id}')
|
||||
cqpserver_container_name: str = f'cqpserver_{db_corpus_id}'
|
||||
cqpserver_container: Container = docker_client.containers.get(cqpserver_container_name)
|
||||
cqpserver_host: str = cqpserver_container.attrs['NetworkSettings']['Networks'][current_app.config['NOPAQUE_DOCKER_NETWORK_NAME']]['IPAddress']
|
||||
cqi_client: CQiClient = CQiClient(cqpserver_host)
|
||||
session['cqi_over_sio'] = {
|
||||
'cqi_client': cqi_client,
|
||||
'cqi_client_lock': Lock(),
|
||||
'db_corpus_id': db_corpus_id
|
||||
}
|
||||
cqi_client = CQiClient(f'cqpserver_{db_corpus_id}', timeout=float('inf'))
|
||||
session['cqi_over_sio'] = {}
|
||||
session['cqi_over_sio']['cqi_client'] = cqi_client
|
||||
session['cqi_over_sio']['cqi_client_lock'] = Lock()
|
||||
session['cqi_over_sio']['db_corpus_id'] = db_corpus_id
|
||||
return {'code': 200, 'msg': 'OK'}
|
||||
|
||||
@socketio_login_required
|
||||
@ -199,8 +193,7 @@ class CQiNamespace(Namespace):
|
||||
except (BrokenPipeError, CQiException):
|
||||
pass
|
||||
cqi_client_lock.release()
|
||||
db_corpus: Optional[Corpus] = Corpus.query.get(db_corpus_id)
|
||||
if db_corpus is None:
|
||||
return
|
||||
db_corpus.num_analysis_sessions = Corpus.num_analysis_sessions - 1
|
||||
db.session.commit()
|
||||
db_corpus = Corpus.query.get(db_corpus_id)
|
||||
if db_corpus is not None:
|
||||
db_corpus.num_analysis_sessions = Corpus.num_analysis_sessions - 1
|
||||
db.session.commit()
|
||||
|
@ -1,7 +1,6 @@
|
||||
from collections import Counter
|
||||
from cqi import CQiClient
|
||||
from cqi.models.corpora import Corpus as CQiCorpus
|
||||
from cqi.models.subcorpora import Subcorpus as CQiSubcorpus
|
||||
from cqi.models.attributes import (
|
||||
PositionalAttribute as CQiPositionalAttribute,
|
||||
StructuralAttribute as CQiStructuralAttribute
|
||||
@ -12,6 +11,7 @@ from typing import Dict, List
|
||||
import gzip
|
||||
import json
|
||||
import math
|
||||
import os
|
||||
from app import db
|
||||
from app.models import Corpus
|
||||
from .utils import lookups_by_cpos, partial_export_subcorpus, export_subcorpus
|
||||
@ -40,132 +40,161 @@ def ext_corpus_update_db(corpus: str) -> CQiStatusOk:
|
||||
def ext_corpus_static_data(corpus: str) -> Dict:
|
||||
db_corpus_id: int = session['cqi_over_sio']['db_corpus_id']
|
||||
db_corpus: Corpus = Corpus.query.get(db_corpus_id)
|
||||
|
||||
static_data_file_path = db_corpus.path / 'cwb' / 'static.json.gz'
|
||||
if static_data_file_path.exists():
|
||||
with static_data_file_path.open('rb') as f:
|
||||
cache_file_path: str = os.path.join(db_corpus.path, 'cwb', 'static.json.gz')
|
||||
if os.path.exists(cache_file_path):
|
||||
with open(cache_file_path, 'rb') as f:
|
||||
return f.read()
|
||||
|
||||
cqi_client: CQiClient = session['cqi_over_sio']['cqi_client']
|
||||
cqi_corpus: CQiCorpus = cqi_client.corpora.get(corpus)
|
||||
cqi_p_attrs: List[CQiPositionalAttribute] = cqi_corpus.positional_attributes.list()
|
||||
cqi_s_attrs: List[CQiStructuralAttribute] = cqi_corpus.structural_attributes.list()
|
||||
|
||||
static_data = {
|
||||
cqi_p_attrs: Dict[str, CQiPositionalAttribute] = {
|
||||
p_attr.name: p_attr
|
||||
for p_attr in cqi_corpus.positional_attributes.list()
|
||||
}
|
||||
cqi_s_attrs: Dict[str, CQiStructuralAttribute] = {
|
||||
s_attr.name: s_attr
|
||||
for s_attr in cqi_corpus.structural_attributes.list()
|
||||
}
|
||||
static_corpus_data = {
|
||||
'corpus': {
|
||||
'bounds': [0, cqi_corpus.size - 1],
|
||||
'counts': {
|
||||
'token': cqi_corpus.size
|
||||
},
|
||||
'freqs': {}
|
||||
},
|
||||
'p_attrs': {},
|
||||
's_attrs': {},
|
||||
'values': {'p_attrs': {}, 's_attrs': {}}
|
||||
}
|
||||
|
||||
for p_attr in cqi_p_attrs:
|
||||
print(f'corpus.freqs.{p_attr.name}')
|
||||
static_data['corpus']['freqs'][p_attr.name] = []
|
||||
p_attr_id_list: List[int] = list(range(p_attr.lexicon_size))
|
||||
static_data['corpus']['freqs'][p_attr.name].extend(p_attr.freqs_by_ids(p_attr_id_list))
|
||||
for p_attr in cqi_p_attrs.values():
|
||||
static_corpus_data['corpus']['freqs'][p_attr.name] = {}
|
||||
chunk_size = 10000
|
||||
p_attr_id_list = list(range(p_attr.lexicon_size))
|
||||
chunks = [p_attr_id_list[i:i+chunk_size] for i in range(0, len(p_attr_id_list), chunk_size)]
|
||||
del p_attr_id_list
|
||||
|
||||
print(f'p_attrs.{p_attr.name}')
|
||||
static_data['p_attrs'][p_attr.name] = []
|
||||
cpos_list: List[int] = list(range(cqi_corpus.size))
|
||||
static_data['p_attrs'][p_attr.name].extend(p_attr.ids_by_cpos(cpos_list))
|
||||
for chunk in chunks:
|
||||
# print(f'corpus.freqs.{p_attr.name}: {chunk[0]} - {chunk[-1]}')
|
||||
static_corpus_data['corpus']['freqs'][p_attr.name].update(
|
||||
dict(zip(chunk, p_attr.freqs_by_ids(chunk)))
|
||||
)
|
||||
del chunks
|
||||
static_corpus_data['p_attrs'][p_attr.name] = {}
|
||||
cpos_list = list(range(cqi_corpus.size))
|
||||
chunks = [cpos_list[i:i+chunk_size] for i in range(0, len(cpos_list), chunk_size)]
|
||||
del cpos_list
|
||||
|
||||
print(f'values.p_attrs.{p_attr.name}')
|
||||
static_data['values']['p_attrs'][p_attr.name] = []
|
||||
p_attr_id_list: List[int] = list(range(p_attr.lexicon_size))
|
||||
static_data['values']['p_attrs'][p_attr.name].extend(p_attr.values_by_ids(p_attr_id_list))
|
||||
for chunk in chunks:
|
||||
# print(f'p_attrs.{p_attr.name}: {chunk[0]} - {chunk[-1]}')
|
||||
static_corpus_data['p_attrs'][p_attr.name].update(
|
||||
dict(zip(chunk, p_attr.ids_by_cpos(chunk)))
|
||||
)
|
||||
del chunks
|
||||
static_corpus_data['values']['p_attrs'][p_attr.name] = {}
|
||||
p_attr_id_list = list(range(p_attr.lexicon_size))
|
||||
chunks = [p_attr_id_list[i:i+chunk_size] for i in range(0, len(p_attr_id_list), chunk_size)]
|
||||
del p_attr_id_list
|
||||
|
||||
for s_attr in cqi_s_attrs:
|
||||
for chunk in chunks:
|
||||
# print(f'values.p_attrs.{p_attr.name}: {chunk[0]} - {chunk[-1]}')
|
||||
static_corpus_data['values']['p_attrs'][p_attr.name].update(
|
||||
dict(zip(chunk, p_attr.values_by_ids(chunk)))
|
||||
)
|
||||
del chunks
|
||||
for s_attr in cqi_s_attrs.values():
|
||||
if s_attr.has_values:
|
||||
continue
|
||||
|
||||
static_data['s_attrs'][s_attr.name] = {'lexicon': [], 'values': None}
|
||||
|
||||
if s_attr.name in ['s', 'ent']:
|
||||
##############################################################
|
||||
# A faster way to get cpos boundaries for smaller s_attrs #
|
||||
# Note: Needs more testing, don't use it in production #
|
||||
##############################################################
|
||||
cqi_corpus.query('Last', f'<{s_attr.name}> []* </{s_attr.name}>;')
|
||||
cqi_subcorpus: CQiSubcorpus = cqi_corpus.subcorpora.get('Last')
|
||||
first_match: int = 0
|
||||
last_match: int = cqi_subcorpus.size - 1
|
||||
match_boundaries = zip(
|
||||
range(first_match, last_match + 1),
|
||||
cqi_subcorpus.dump(
|
||||
cqi_subcorpus.fields['match'],
|
||||
first_match,
|
||||
last_match
|
||||
),
|
||||
cqi_subcorpus.dump(
|
||||
cqi_subcorpus.fields['matchend'],
|
||||
first_match,
|
||||
last_match
|
||||
)
|
||||
)
|
||||
cqi_subcorpus.drop()
|
||||
del cqi_subcorpus, first_match, last_match
|
||||
for id, lbound, rbound in match_boundaries:
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'].append({})
|
||||
print(f's_attrs.{s_attr.name}.lexicon.{id}.bounds')
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'][id]['bounds'] = [lbound, rbound]
|
||||
del match_boundaries
|
||||
|
||||
if s_attr.name != 'text':
|
||||
continue
|
||||
|
||||
static_corpus_data['corpus']['counts'][s_attr.name] = s_attr.size
|
||||
static_corpus_data['s_attrs'][s_attr.name] = {'lexicon': {}, 'values': None}
|
||||
static_corpus_data['values']['s_attrs'][s_attr.name] = {}
|
||||
##########################################################################
|
||||
# A faster way to get cpos boundaries for smaller s_attrs #
|
||||
##########################################################################
|
||||
# if s_attr.name in ['s', 'ent']:
|
||||
# cqi_corpus.query('Last', f'<{s_attr.name}> []* </{s_attr.name}>;')
|
||||
# cqi_subcorpus = cqi_corpus.subcorpora.get('Last')
|
||||
# first_match = 0
|
||||
# last_match = cqi_subcorpus.size - 1
|
||||
# match_boundaries = zip(
|
||||
# range(first_match, last_match + 1),
|
||||
# cqi_subcorpus.dump(cqi_subcorpus.fields['match'], first_match, last_match),
|
||||
# cqi_subcorpus.dump(cqi_subcorpus.fields['matchend'], first_match, last_match)
|
||||
# )
|
||||
# for id, lbound, rbound in match_boundaries:
|
||||
# static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id] = {}
|
||||
# static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['bounds'] = [lbound, rbound]
|
||||
# static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['counts'] = {}
|
||||
# static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['counts']['token'] = rbound - lbound + 1
|
||||
# cqi_subcorpus.drop()
|
||||
for id in range(0, s_attr.size):
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'].append({})
|
||||
# This is a very slow operation, thats why we only use it for
|
||||
# the text attribute
|
||||
# print(f's_attrs.{s_attr.name}.lexicon.{id}')
|
||||
static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id] = {
|
||||
'bounds': None,
|
||||
'counts': None,
|
||||
'freqs': None
|
||||
}
|
||||
if s_attr.name != 'text':
|
||||
continue
|
||||
lbound, rbound = s_attr.cpos_by_id(id)
|
||||
print(f's_attrs.{s_attr.name}.lexicon.{id}.bounds')
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'][id]['bounds'] = [lbound, rbound]
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'][id]['freqs'] = {}
|
||||
cpos_list: List[int] = list(range(lbound, rbound + 1))
|
||||
for p_attr in cqi_p_attrs:
|
||||
p_attr_ids: List[int] = []
|
||||
p_attr_ids.extend(p_attr.ids_by_cpos(cpos_list))
|
||||
print(f's_attrs.{s_attr.name}.lexicon.{id}.freqs.{p_attr.name}')
|
||||
static_data['s_attrs'][s_attr.name]['lexicon'][id]['freqs'][p_attr.name] = dict(Counter(p_attr_ids))
|
||||
del p_attr_ids
|
||||
# print(f's_attrs.{s_attr.name}.lexicon.{id}.bounds')
|
||||
static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['bounds'] = [lbound, rbound]
|
||||
# print(f's_attrs.{s_attr.name}.lexicon.{id}.counts')
|
||||
static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['counts'] = {}
|
||||
static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['counts']['token'] = rbound - lbound + 1
|
||||
cpos_list = list(range(lbound, rbound + 1))
|
||||
chunks = [cpos_list[i:i+chunk_size] for i in range(0, len(cpos_list), chunk_size)]
|
||||
del cpos_list
|
||||
|
||||
sub_s_attrs: List[CQiStructuralAttribute] = cqi_corpus.structural_attributes.list(filters={'part_of': s_attr})
|
||||
print(f's_attrs.{s_attr.name}.values')
|
||||
static_data['s_attrs'][s_attr.name]['values'] = [
|
||||
ent_ids = set()
|
||||
for chunk in chunks:
|
||||
# print(f'Gather ent_ids from cpos: {chunk[0]} - {chunk[-1]}')
|
||||
ent_ids.update({x for x in cqi_s_attrs['ent'].ids_by_cpos(chunk) if x != -1})
|
||||
static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['counts']['ent'] = len(ent_ids)
|
||||
del ent_ids
|
||||
s_ids = set()
|
||||
for chunk in chunks:
|
||||
# print(f'Gather s_ids from cpos: {chunk[0]} - {chunk[-1]}')
|
||||
s_ids.update({x for x in cqi_s_attrs['s'].ids_by_cpos(chunk) if x != -1})
|
||||
static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['counts']['s'] = len(s_ids)
|
||||
del s_ids
|
||||
# print(f's_attrs.{s_attr.name}.lexicon.{id}.freqs')
|
||||
static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['freqs'] = {}
|
||||
for p_attr in cqi_p_attrs.values():
|
||||
p_attr_ids = []
|
||||
for chunk in chunks:
|
||||
# print(f'Gather p_attr_ids from cpos: {chunk[0]} - {chunk[-1]}')
|
||||
p_attr_ids.extend(p_attr.ids_by_cpos(chunk))
|
||||
static_corpus_data['s_attrs'][s_attr.name]['lexicon'][id]['freqs'][p_attr.name] = dict(Counter(p_attr_ids))
|
||||
del p_attr_ids
|
||||
del chunks
|
||||
sub_s_attrs = cqi_corpus.structural_attributes.list(filters={'part_of': s_attr})
|
||||
s_attr_value_names: List[str] = [
|
||||
sub_s_attr.name[(len(s_attr.name) + 1):]
|
||||
for sub_s_attr in sub_s_attrs
|
||||
]
|
||||
s_attr_id_list: List[int] = list(range(s_attr.size))
|
||||
sub_s_attr_values: List[str] = []
|
||||
s_attr_id_list = list(range(s_attr.size))
|
||||
chunks = [s_attr_id_list[i:i+chunk_size] for i in range(0, len(s_attr_id_list), chunk_size)]
|
||||
del s_attr_id_list
|
||||
sub_s_attr_values = []
|
||||
for sub_s_attr in sub_s_attrs:
|
||||
tmp = []
|
||||
tmp.extend(sub_s_attr.values_by_ids(s_attr_id_list))
|
||||
for chunk in chunks:
|
||||
tmp.extend(sub_s_attr.values_by_ids(chunk))
|
||||
sub_s_attr_values.append(tmp)
|
||||
del tmp
|
||||
del s_attr_id_list
|
||||
print(f'values.s_attrs.{s_attr.name}')
|
||||
static_data['values']['s_attrs'][s_attr.name] = [
|
||||
{
|
||||
s_attr_value_name: sub_s_attr_values[s_attr_value_name_idx][s_attr_id]
|
||||
del chunks
|
||||
# print(f's_attrs.{s_attr.name}.values')
|
||||
static_corpus_data['s_attrs'][s_attr.name]['values'] = s_attr_value_names
|
||||
# print(f'values.s_attrs.{s_attr.name}')
|
||||
static_corpus_data['values']['s_attrs'][s_attr.name] = {
|
||||
s_attr_id: {
|
||||
s_attr_value_name: sub_s_attr_values[s_attr_value_name_idx][s_attr_id_idx]
|
||||
for s_attr_value_name_idx, s_attr_value_name in enumerate(
|
||||
static_data['s_attrs'][s_attr.name]['values']
|
||||
static_corpus_data['s_attrs'][s_attr.name]['values']
|
||||
)
|
||||
} for s_attr_id in range(0, s_attr.size)
|
||||
]
|
||||
} for s_attr_id_idx, s_attr_id in enumerate(range(0, s_attr.size))
|
||||
}
|
||||
del sub_s_attr_values
|
||||
print('Saving static data to file')
|
||||
with gzip.open(static_data_file_path, 'wt') as f:
|
||||
json.dump(static_data, f)
|
||||
del static_data
|
||||
print('Sending static data to client')
|
||||
with open(static_data_file_path, 'rb') as f:
|
||||
with gzip.open(cache_file_path, 'wt') as f:
|
||||
json.dump(static_corpus_data, f)
|
||||
del static_corpus_data
|
||||
with open(cache_file_path, 'rb') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
|
@ -1,44 +1,46 @@
|
||||
from cqi.models.corpora import Corpus as CQiCorpus
|
||||
from cqi.models.subcorpora import Subcorpus as CQiSubcorpus
|
||||
from cqi.models.corpora import Corpus
|
||||
from cqi.models.subcorpora import Subcorpus
|
||||
from typing import Dict, List
|
||||
from app.models import Corpus
|
||||
|
||||
|
||||
def lookups_by_cpos(corpus: CQiCorpus, cpos_list: List[int]) -> Dict:
|
||||
def lookups_by_cpos(corpus: Corpus, cpos_list: List[int]) -> Dict:
|
||||
lookups = {}
|
||||
lookups['cpos_lookup'] = {cpos: {} for cpos in cpos_list}
|
||||
for attr in corpus.positional_attributes.list():
|
||||
cpos_attr_values: List[str] = attr.values_by_cpos(cpos_list)
|
||||
cpos_attr_values = attr.values_by_cpos(cpos_list)
|
||||
for i, cpos in enumerate(cpos_list):
|
||||
lookups['cpos_lookup'][cpos][attr.name] = cpos_attr_values[i]
|
||||
lookups['cpos_lookup'][cpos][attr.attrs['name']] = \
|
||||
cpos_attr_values[i]
|
||||
for attr in corpus.structural_attributes.list():
|
||||
# We only want to iterate over non subattributes, identifiable by
|
||||
# attr.has_values == False
|
||||
if attr.has_values:
|
||||
# attr.attrs['has_values'] == False
|
||||
if attr.attrs['has_values']:
|
||||
continue
|
||||
cpos_attr_ids: List[int] = attr.ids_by_cpos(cpos_list)
|
||||
cpos_attr_ids = attr.ids_by_cpos(cpos_list)
|
||||
for i, cpos in enumerate(cpos_list):
|
||||
if cpos_attr_ids[i] == -1:
|
||||
continue
|
||||
lookups['cpos_lookup'][cpos][attr.name] = cpos_attr_ids[i]
|
||||
lookups['cpos_lookup'][cpos][attr.attrs['name']] = cpos_attr_ids[i]
|
||||
occured_attr_ids = [x for x in set(cpos_attr_ids) if x != -1]
|
||||
if len(occured_attr_ids) == 0:
|
||||
if not occured_attr_ids:
|
||||
continue
|
||||
subattrs = corpus.structural_attributes.list(filters={'part_of': attr})
|
||||
if len(subattrs) == 0:
|
||||
if not subattrs:
|
||||
continue
|
||||
lookup_name: str = f'{attr.name}_lookup'
|
||||
lookup_name = f'{attr.attrs["name"]}_lookup'
|
||||
lookups[lookup_name] = {}
|
||||
for attr_id in occured_attr_ids:
|
||||
lookups[lookup_name][attr_id] = {}
|
||||
for subattr in subattrs:
|
||||
subattr_name = subattr.name[(len(attr.name) + 1):] # noqa
|
||||
subattr_name = subattr.attrs['name'][(len(attr.attrs['name']) + 1):] # noqa
|
||||
for i, subattr_value in enumerate(subattr.values_by_ids(occured_attr_ids)): # noqa
|
||||
lookups[lookup_name][occured_attr_ids[i]][subattr_name] = subattr_value # noqa
|
||||
return lookups
|
||||
|
||||
|
||||
def partial_export_subcorpus(
|
||||
subcorpus: CQiSubcorpus,
|
||||
subcorpus: Subcorpus,
|
||||
match_id_list: List[int],
|
||||
context: int = 25
|
||||
) -> Dict:
|
||||
@ -87,7 +89,7 @@ def partial_export_subcorpus(
|
||||
|
||||
|
||||
def export_subcorpus(
|
||||
subcorpus: CQiSubcorpus,
|
||||
subcorpus: Subcorpus,
|
||||
context: int = 25,
|
||||
cutoff: float = float('inf'),
|
||||
offset: int = 0
|
||||
|
@ -7,6 +7,7 @@ from flask import (
|
||||
url_for
|
||||
)
|
||||
from flask_breadcrumbs import register_breadcrumb
|
||||
import os
|
||||
from app import db
|
||||
from app.models import Corpus, CorpusFile, CorpusStatus
|
||||
from ..decorators import corpus_follower_permission_required
|
||||
@ -91,8 +92,8 @@ def corpus_file(corpus_id, corpus_file_id):
|
||||
def download_corpus_file(corpus_id, corpus_file_id):
|
||||
corpus_file = CorpusFile.query.filter_by(corpus_id=corpus_id, id=corpus_file_id).first_or_404()
|
||||
return send_from_directory(
|
||||
corpus_file.path.parent,
|
||||
corpus_file.path.name,
|
||||
os.path.dirname(corpus_file.path),
|
||||
os.path.basename(corpus_file.path),
|
||||
as_attachment=True,
|
||||
attachment_filename=corpus_file.filename,
|
||||
mimetype=corpus_file.mimetype
|
||||
|
@ -12,65 +12,65 @@ from ..decorators import corpus_follower_permission_required
|
||||
from . import bp
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/followers', methods=['POST'])
|
||||
@corpus_follower_permission_required('MANAGE_FOLLOWERS')
|
||||
@content_negotiation(consumes='application/json', produces='application/json')
|
||||
def create_corpus_followers(corpus_id):
|
||||
usernames = request.json
|
||||
if not (isinstance(usernames, list) or all(isinstance(u, str) for u in usernames)):
|
||||
abort(400)
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
for username in usernames:
|
||||
user = User.query.filter_by(username=username, is_public=True).first_or_404()
|
||||
user.follow_corpus(corpus)
|
||||
db.session.commit()
|
||||
response_data = {
|
||||
'message': f'Users are now following "{corpus.title}"',
|
||||
'category': 'corpus'
|
||||
}
|
||||
return response_data, 200
|
||||
# @bp.route('/<hashid:corpus_id>/followers', methods=['POST'])
|
||||
# @corpus_follower_permission_required('MANAGE_FOLLOWERS')
|
||||
# @content_negotiation(consumes='application/json', produces='application/json')
|
||||
# def create_corpus_followers(corpus_id):
|
||||
# usernames = request.json
|
||||
# if not (isinstance(usernames, list) or all(isinstance(u, str) for u in usernames)):
|
||||
# abort(400)
|
||||
# corpus = Corpus.query.get_or_404(corpus_id)
|
||||
# for username in usernames:
|
||||
# user = User.query.filter_by(username=username, is_public=True).first_or_404()
|
||||
# user.follow_corpus(corpus)
|
||||
# db.session.commit()
|
||||
# response_data = {
|
||||
# 'message': f'Users are now following "{corpus.title}"',
|
||||
# 'category': 'corpus'
|
||||
# }
|
||||
# return response_data, 200
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/followers/<hashid:follower_id>/role', methods=['PUT'])
|
||||
@corpus_follower_permission_required('MANAGE_FOLLOWERS')
|
||||
@content_negotiation(consumes='application/json', produces='application/json')
|
||||
def update_corpus_follower_role(corpus_id, follower_id):
|
||||
role_name = request.json
|
||||
if not isinstance(role_name, str):
|
||||
abort(400)
|
||||
cfr = CorpusFollowerRole.query.filter_by(name=role_name).first()
|
||||
if cfr is None:
|
||||
abort(400)
|
||||
cfa = CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=follower_id).first_or_404()
|
||||
cfa.role = cfr
|
||||
db.session.commit()
|
||||
response_data = {
|
||||
'message': f'User "{cfa.follower.username}" is now {cfa.role.name}',
|
||||
'category': 'corpus'
|
||||
}
|
||||
return response_data, 200
|
||||
# @bp.route('/<hashid:corpus_id>/followers/<hashid:follower_id>/role', methods=['PUT'])
|
||||
# @corpus_follower_permission_required('MANAGE_FOLLOWERS')
|
||||
# @content_negotiation(consumes='application/json', produces='application/json')
|
||||
# def update_corpus_follower_role(corpus_id, follower_id):
|
||||
# role_name = request.json
|
||||
# if not isinstance(role_name, str):
|
||||
# abort(400)
|
||||
# cfr = CorpusFollowerRole.query.filter_by(name=role_name).first()
|
||||
# if cfr is None:
|
||||
# abort(400)
|
||||
# cfa = CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=follower_id).first_or_404()
|
||||
# cfa.role = cfr
|
||||
# db.session.commit()
|
||||
# response_data = {
|
||||
# 'message': f'User "{cfa.follower.username}" is now {cfa.role.name}',
|
||||
# 'category': 'corpus'
|
||||
# }
|
||||
# return response_data, 200
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/followers/<hashid:follower_id>', methods=['DELETE'])
|
||||
def delete_corpus_follower(corpus_id, follower_id):
|
||||
cfa = CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=follower_id).first_or_404()
|
||||
if not (
|
||||
current_user.id == follower_id
|
||||
or current_user == cfa.corpus.user
|
||||
or CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=current_user.id).first().role.has_permission('MANAGE_FOLLOWERS')
|
||||
or current_user.is_administrator()):
|
||||
abort(403)
|
||||
if current_user.id == follower_id:
|
||||
flash(f'You are no longer following "{cfa.corpus.title}"', 'corpus')
|
||||
response = make_response()
|
||||
response.status_code = 204
|
||||
else:
|
||||
response_data = {
|
||||
'message': f'"{cfa.follower.username}" is not following "{cfa.corpus.title}" anymore',
|
||||
'category': 'corpus'
|
||||
}
|
||||
response = jsonify(response_data)
|
||||
response.status_code = 200
|
||||
cfa.follower.unfollow_corpus(cfa.corpus)
|
||||
db.session.commit()
|
||||
return response
|
||||
# @bp.route('/<hashid:corpus_id>/followers/<hashid:follower_id>', methods=['DELETE'])
|
||||
# def delete_corpus_follower(corpus_id, follower_id):
|
||||
# cfa = CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=follower_id).first_or_404()
|
||||
# if not (
|
||||
# current_user.id == follower_id
|
||||
# or current_user == cfa.corpus.user
|
||||
# or CorpusFollowerAssociation.query.filter_by(corpus_id=corpus_id, follower_id=current_user.id).first().role.has_permission('MANAGE_FOLLOWERS')
|
||||
# or current_user.is_administrator()):
|
||||
# abort(403)
|
||||
# if current_user.id == follower_id:
|
||||
# flash(f'You are no longer following "{cfa.corpus.title}"', 'corpus')
|
||||
# response = make_response()
|
||||
# response.status_code = 204
|
||||
# else:
|
||||
# response_data = {
|
||||
# 'message': f'"{cfa.follower.username}" is not following "{cfa.corpus.title}" anymore',
|
||||
# 'category': 'corpus'
|
||||
# }
|
||||
# response = jsonify(response_data)
|
||||
# response.status_code = 200
|
||||
# cfa.follower.unfollow_corpus(cfa.corpus)
|
||||
# db.session.commit()
|
||||
# return response
|
||||
|
@ -61,7 +61,7 @@ def build_corpus(corpus_id):
|
||||
@bp.route('/stopwords')
|
||||
@content_negotiation(produces='application/json')
|
||||
def get_stopwords():
|
||||
nltk.download('stopwords', quiet=True)
|
||||
nltk.download('stopwords')
|
||||
languages = ["german", "english", "catalan", "greek", "spanish", "french", "italian", "russian", "chinese"]
|
||||
stopwords = {}
|
||||
for language in languages:
|
||||
@ -71,55 +71,55 @@ def get_stopwords():
|
||||
response_data = stopwords
|
||||
return response_data, 202
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/generate-share-link', methods=['POST'])
|
||||
@corpus_follower_permission_required('MANAGE_FOLLOWERS')
|
||||
@content_negotiation(consumes='application/json', produces='application/json')
|
||||
def generate_corpus_share_link(corpus_id):
|
||||
data = request.json
|
||||
if not isinstance(data, dict):
|
||||
abort(400)
|
||||
expiration = data.get('expiration')
|
||||
if not isinstance(expiration, str):
|
||||
abort(400)
|
||||
role_name = data.get('role')
|
||||
if not isinstance(role_name, str):
|
||||
abort(400)
|
||||
expiration_date = datetime.strptime(expiration, '%b %d, %Y')
|
||||
cfr = CorpusFollowerRole.query.filter_by(name=role_name).first()
|
||||
if cfr is None:
|
||||
abort(400)
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
token = current_user.generate_follow_corpus_token(corpus.hashid, role_name, expiration_date)
|
||||
corpus_share_link = url_for(
|
||||
'corpora.follow_corpus',
|
||||
corpus_id=corpus_id,
|
||||
token=token,
|
||||
_external=True
|
||||
)
|
||||
response_data = {
|
||||
'message': 'Corpus share link generated',
|
||||
'category': 'corpus',
|
||||
'corpusShareLink': corpus_share_link
|
||||
}
|
||||
return response_data, 200
|
||||
# @bp.route('/<hashid:corpus_id>/generate-share-link', methods=['POST'])
|
||||
# @corpus_follower_permission_required('MANAGE_FOLLOWERS')
|
||||
# @content_negotiation(consumes='application/json', produces='application/json')
|
||||
# def generate_corpus_share_link(corpus_id):
|
||||
# data = request.json
|
||||
# if not isinstance(data, dict):
|
||||
# abort(400)
|
||||
# expiration = data.get('expiration')
|
||||
# if not isinstance(expiration, str):
|
||||
# abort(400)
|
||||
# role_name = data.get('role')
|
||||
# if not isinstance(role_name, str):
|
||||
# abort(400)
|
||||
# expiration_date = datetime.strptime(expiration, '%b %d, %Y')
|
||||
# cfr = CorpusFollowerRole.query.filter_by(name=role_name).first()
|
||||
# if cfr is None:
|
||||
# abort(400)
|
||||
# corpus = Corpus.query.get_or_404(corpus_id)
|
||||
# token = current_user.generate_follow_corpus_token(corpus.hashid, role_name, expiration_date)
|
||||
# corpus_share_link = url_for(
|
||||
# 'corpora.follow_corpus',
|
||||
# corpus_id=corpus_id,
|
||||
# token=token,
|
||||
# _external=True
|
||||
# )
|
||||
# response_data = {
|
||||
# 'message': 'Corpus share link generated',
|
||||
# 'category': 'corpus',
|
||||
# 'corpusShareLink': corpus_share_link
|
||||
# }
|
||||
# return response_data, 200
|
||||
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/is_public', methods=['PUT'])
|
||||
@corpus_owner_or_admin_required
|
||||
@content_negotiation(consumes='application/json', produces='application/json')
|
||||
def update_corpus_is_public(corpus_id):
|
||||
is_public = request.json
|
||||
if not isinstance(is_public, bool):
|
||||
abort(400)
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
corpus.is_public = is_public
|
||||
db.session.commit()
|
||||
response_data = {
|
||||
'message': (
|
||||
f'Corpus "{corpus.title}" is now'
|
||||
f' {"public" if is_public else "private"}'
|
||||
),
|
||||
'category': 'corpus'
|
||||
}
|
||||
return response_data, 200
|
||||
# @bp.route('/<hashid:corpus_id>/is_public', methods=['PUT'])
|
||||
# @corpus_owner_or_admin_required
|
||||
# @content_negotiation(consumes='application/json', produces='application/json')
|
||||
# def update_corpus_is_public(corpus_id):
|
||||
# is_public = request.json
|
||||
# if not isinstance(is_public, bool):
|
||||
# abort(400)
|
||||
# corpus = Corpus.query.get_or_404(corpus_id)
|
||||
# corpus.is_public = is_public
|
||||
# db.session.commit()
|
||||
# response_data = {
|
||||
# 'message': (
|
||||
# f'Corpus "{corpus.title}" is now'
|
||||
# f' {"public" if is_public else "private"}'
|
||||
# ),
|
||||
# 'category': 'corpus'
|
||||
# }
|
||||
# return response_data, 200
|
||||
|
@ -68,19 +68,20 @@ def corpus(corpus_id):
|
||||
corpus=corpus,
|
||||
cfr=cfr,
|
||||
cfrs=cfrs,
|
||||
users=users
|
||||
users = users
|
||||
)
|
||||
if (current_user.is_following_corpus(corpus) or corpus.is_public):
|
||||
cfas = CorpusFollowerAssociation.query.filter(Corpus.id == corpus_id, CorpusFollowerAssociation.follower_id != corpus.user.id).all()
|
||||
return render_template(
|
||||
'corpora/public_corpus.html.j2',
|
||||
title=corpus.title,
|
||||
corpus=corpus,
|
||||
cfrs=cfrs,
|
||||
cfr=cfr,
|
||||
cfas=cfas,
|
||||
users=users
|
||||
)
|
||||
abort(404)
|
||||
# cfas = CorpusFollowerAssociation.query.filter(Corpus.id == corpus_id, CorpusFollowerAssociation.follower_id != corpus.user.id).all()
|
||||
# return render_template(
|
||||
# 'corpora/public_corpus.html.j2',
|
||||
# title=corpus.title,
|
||||
# corpus=corpus,
|
||||
# cfrs=cfrs,
|
||||
# cfr=cfr,
|
||||
# cfas=cfas,
|
||||
# users = users
|
||||
# )
|
||||
abort(403)
|
||||
|
||||
|
||||
@ -97,14 +98,14 @@ def analysis(corpus_id):
|
||||
)
|
||||
|
||||
|
||||
@bp.route('/<hashid:corpus_id>/follow/<token>')
|
||||
def follow_corpus(corpus_id, token):
|
||||
corpus = Corpus.query.get_or_404(corpus_id)
|
||||
if current_user.follow_corpus_by_token(token):
|
||||
db.session.commit()
|
||||
flash(f'You are following "{corpus.title}" now', category='corpus')
|
||||
return redirect(url_for('corpora.corpus', corpus_id=corpus_id))
|
||||
abort(403)
|
||||
# @bp.route('/<hashid:corpus_id>/follow/<token>')
|
||||
# def follow_corpus(corpus_id, token):
|
||||
# corpus = Corpus.query.get_or_404(corpus_id)
|
||||
# if current_user.follow_corpus_by_token(token):
|
||||
# db.session.commit()
|
||||
# flash(f'You are following "{corpus.title}" now', category='corpus')
|
||||
# return redirect(url_for('corpora.corpus', corpus_id=corpus_id))
|
||||
# abort(403)
|
||||
|
||||
|
||||
@bp.route('/import', methods=['GET', 'POST'])
|
||||
|
@ -45,7 +45,7 @@ def _create_build_corpus_service(corpus):
|
||||
''' ## Constraints ## '''
|
||||
constraints = ['node.role==worker']
|
||||
''' ## Image ## '''
|
||||
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cwb:r1879'
|
||||
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cwb:r1702'
|
||||
''' ## Labels ## '''
|
||||
labels = {
|
||||
'origin': current_app.config['SERVER_NAME'],
|
||||
@ -139,11 +139,11 @@ def _create_cqpserver_container(corpus):
|
||||
''' ## Entrypoint ## '''
|
||||
entrypoint = ['bash', '-c']
|
||||
''' ## Image ## '''
|
||||
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cwb:r1879'
|
||||
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cwb:r1702'
|
||||
''' ## Name ## '''
|
||||
name = f'cqpserver_{corpus.id}'
|
||||
''' ## Network ## '''
|
||||
network = f'{current_app.config["NOPAQUE_DOCKER_NETWORK_NAME"]}'
|
||||
network = f'{current_app.config["DOCKER_NETWORK_NAME"]}'
|
||||
''' ## Volumes ## '''
|
||||
volumes = []
|
||||
''' ### Corpus data volume ### '''
|
||||
|
@ -1,2 +0,0 @@
|
||||
from .container_column import ContainerColumn
|
||||
from .int_enum_column import IntEnumColumn
|
@ -1,21 +0,0 @@
|
||||
import json
|
||||
from app import db
|
||||
|
||||
|
||||
class ContainerColumn(db.TypeDecorator):
|
||||
impl = db.String
|
||||
|
||||
def __init__(self, container_type, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.container_type = container_type
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
if isinstance(value, self.container_type):
|
||||
return json.dumps(value)
|
||||
elif isinstance(value, str) and isinstance(json.loads(value), self.container_type):
|
||||
return value
|
||||
else:
|
||||
return TypeError()
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
return json.loads(value)
|
@ -1,22 +0,0 @@
|
||||
from app import db
|
||||
|
||||
|
||||
class IntEnumColumn(db.TypeDecorator):
|
||||
impl = db.Integer
|
||||
|
||||
def __init__(self, enum_type, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.enum_type = enum_type
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
if isinstance(value, self.enum_type) and isinstance(value.value, int):
|
||||
return value.value
|
||||
elif isinstance(value, int):
|
||||
return self.enum_type(value).value
|
||||
elif isinstance(value, str):
|
||||
return self.enum_type[value].value
|
||||
else:
|
||||
return TypeError()
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
return self.enum_type(value)
|
@ -1,6 +1,7 @@
|
||||
from flask import abort, current_app
|
||||
from flask_login import current_user
|
||||
from threading import Thread
|
||||
import os
|
||||
from app import db
|
||||
from app.decorators import admin_required, content_negotiation
|
||||
from app.models import Job, JobStatus
|
||||
@ -38,7 +39,7 @@ def job_log(job_id):
|
||||
if job.status not in [JobStatus.COMPLETED, JobStatus.FAILED]:
|
||||
response = {'errors': {'message': 'Job status is not completed or failed'}}
|
||||
return response, 409
|
||||
with open(job.path / 'pipeline_data' / 'logs' / 'pyflow_log.txt') as log_file:
|
||||
with open(os.path.join(job.path, 'pipeline_data', 'logs', 'pyflow_log.txt')) as log_file:
|
||||
log = log_file.read()
|
||||
response_data = {
|
||||
'jobLog': log
|
||||
|
@ -7,6 +7,7 @@ from flask import (
|
||||
)
|
||||
from flask_breadcrumbs import register_breadcrumb
|
||||
from flask_login import current_user
|
||||
import os
|
||||
from app.models import Job, JobInput, JobResult
|
||||
from . import bp
|
||||
from .utils import job_dynamic_list_constructor as job_dlc
|
||||
@ -37,8 +38,8 @@ def download_job_input(job_id, job_input_id):
|
||||
if not (job_input.job.user == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return send_from_directory(
|
||||
job_input.path.parent,
|
||||
job_input.path.name,
|
||||
os.path.dirname(job_input.path),
|
||||
os.path.basename(job_input.path),
|
||||
as_attachment=True,
|
||||
attachment_filename=job_input.filename,
|
||||
mimetype=job_input.mimetype
|
||||
@ -51,8 +52,8 @@ def download_job_result(job_id, job_result_id):
|
||||
if not (job_result.job.user == current_user or current_user.is_administrator()):
|
||||
abort(403)
|
||||
return send_from_directory(
|
||||
job_result.path.parent,
|
||||
job_result.path.name,
|
||||
os.path.dirname(job_result.path),
|
||||
os.path.basename(job_result.path),
|
||||
as_attachment=True,
|
||||
attachment_filename=job_result.filename,
|
||||
mimetype=job_result.mimetype
|
||||
|
@ -1,7 +1,6 @@
|
||||
from flask import current_app
|
||||
from flask_migrate import upgrade
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
import os
|
||||
from app.models import (
|
||||
CorpusFollowerRole,
|
||||
Role,
|
||||
@ -18,15 +17,16 @@ def deploy():
|
||||
# Make default directories
|
||||
print('Make default directories')
|
||||
base_dir = current_app.config['NOPAQUE_DATA_DIR']
|
||||
default_dirs: List[Path] = [
|
||||
base_dir / 'tmp',
|
||||
base_dir / 'users'
|
||||
default_dirs = [
|
||||
os.path.join(base_dir, 'tmp'),
|
||||
os.path.join(base_dir, 'users')
|
||||
]
|
||||
for default_dir in default_dirs:
|
||||
if not default_dir.exists():
|
||||
default_dir.mkdir()
|
||||
if not default_dir.is_dir():
|
||||
raise NotADirectoryError(f'{default_dir} is not a directory')
|
||||
for dir in default_dirs:
|
||||
if os.path.exists(dir):
|
||||
if not os.path.isdir(dir):
|
||||
raise NotADirectoryError(f'{dir} is not a directory')
|
||||
else:
|
||||
os.mkdir(dir)
|
||||
|
||||
# migrate database to latest revision
|
||||
print('Migrate database to latest revision')
|
||||
@ -43,5 +43,3 @@ def deploy():
|
||||
SpaCyNLPPipelineModel.insert_defaults()
|
||||
print('Insert/Update default TesseractOCRPipelineModels')
|
||||
TesseractOCRPipelineModel.insert_defaults()
|
||||
|
||||
# TODO: Implement checks for if the nopaque network exists
|
||||
|
@ -45,6 +45,12 @@ def dashboard():
|
||||
)
|
||||
|
||||
|
||||
# @bp.route('/user_manual')
|
||||
# @register_breadcrumb(bp, '.user_manual', '<i class="material-icons left">help</i>User manual')
|
||||
# def user_manual():
|
||||
# return render_template('main/user_manual.html.j2', title='User manual')
|
||||
|
||||
|
||||
@bp.route('/news')
|
||||
@register_breadcrumb(bp, '.news', '<i class="material-icons left">email</i>News')
|
||||
def news():
|
||||
@ -72,17 +78,15 @@ def terms_of_use():
|
||||
)
|
||||
|
||||
|
||||
@bp.route('/social-area')
|
||||
@register_breadcrumb(bp, '.social_area', '<i class="material-icons left">group</i>Social Area')
|
||||
@login_required
|
||||
def social_area():
|
||||
print('test')
|
||||
corpora = Corpus.query.filter(Corpus.is_public == True, Corpus.user != current_user).all()
|
||||
print(corpora)
|
||||
users = User.query.filter(User.is_public == True, User.id != current_user.id).all()
|
||||
return render_template(
|
||||
'main/social_area.html.j2',
|
||||
title='Social Area',
|
||||
corpora=corpora,
|
||||
users=users
|
||||
)
|
||||
# @bp.route('/social-area')
|
||||
# @register_breadcrumb(bp, '.social_area', '<i class="material-icons left">group</i>Social Area')
|
||||
# @login_required
|
||||
# def social_area():
|
||||
# corpora = Corpus.query.filter(Corpus.is_public == True, Corpus.user != current_user).all()
|
||||
# users = User.query.filter(User.is_public == True, User.id != current_user.id).all()
|
||||
# return render_template(
|
||||
# 'main/social_area.html.j2',
|
||||
# title='Social Area',
|
||||
# corpora=corpora,
|
||||
# users=users
|
||||
# )
|
||||
|
1815
app/models.py
Normal file
@ -1,19 +0,0 @@
|
||||
from .avatar import *
|
||||
from .corpus_file import *
|
||||
from .corpus_follower_association import *
|
||||
from .corpus_follower_role import *
|
||||
from .corpus import *
|
||||
from .job_input import *
|
||||
from .job_result import *
|
||||
from .job import *
|
||||
from .role import *
|
||||
from .spacy_nlp_pipeline_model import *
|
||||
from .tesseract_ocr_pipeline_model import *
|
||||
from .token import *
|
||||
from .user import *
|
||||
from app import login
|
||||
|
||||
|
||||
@login.user_loader
|
||||
def load_user(user_id):
|
||||
return User.query.get(int(user_id))
|
@ -1,40 +0,0 @@
|
||||
from flask import current_app
|
||||
from flask_hashids import HashidMixin
|
||||
from pathlib import Path
|
||||
from app import db
|
||||
from .file_mixin import FileMixin
|
||||
|
||||
|
||||
class Avatar(HashidMixin, FileMixin, db.Model):
|
||||
__tablename__ = 'avatars'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
|
||||
# Relationships
|
||||
user = db.relationship('User', back_populates='avatar')
|
||||
|
||||
@property
|
||||
def path(self) -> Path:
|
||||
return self.user.path / 'avatar'
|
||||
# return os.path.join(self.user.path, 'avatar')
|
||||
|
||||
def delete(self):
|
||||
try:
|
||||
self.path.unlink(missing_ok=True)
|
||||
except OSError as e:
|
||||
current_app.logger.error(e)
|
||||
raise
|
||||
db.session.delete(self)
|
||||
|
||||
def to_json_serializeable(self, backrefs=False, relationships=False):
|
||||
json_serializeable = {
|
||||
'id': self.hashid,
|
||||
**self.file_mixin_to_json_serializeable()
|
||||
}
|
||||
if backrefs:
|
||||
json_serializeable['user'] = \
|
||||
self.user.to_json_serializeable(backrefs=True)
|
||||
if relationships:
|
||||
pass
|
||||
return json_serializeable
|
@ -1,200 +0,0 @@
|
||||
from datetime import datetime
|
||||
from enum import IntEnum
|
||||
from flask import current_app, url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from typing import Union
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import xml.etree.ElementTree as ET
|
||||
from app import db
|
||||
from app.converters.vrt import normalize_vrt_file
|
||||
from app.ext.flask_sqlalchemy import IntEnumColumn
|
||||
from .corpus_follower_association import CorpusFollowerAssociation
|
||||
|
||||
|
||||
class CorpusStatus(IntEnum):
|
||||
UNPREPARED = 1
|
||||
SUBMITTED = 2
|
||||
QUEUED = 3
|
||||
BUILDING = 4
|
||||
BUILT = 5
|
||||
FAILED = 6
|
||||
STARTING_ANALYSIS_SESSION = 7
|
||||
RUNNING_ANALYSIS_SESSION = 8
|
||||
CANCELING_ANALYSIS_SESSION = 9
|
||||
|
||||
@staticmethod
|
||||
def get(corpus_status: Union['CorpusStatus', int, str]) -> 'CorpusStatus':
|
||||
if isinstance(corpus_status, CorpusStatus):
|
||||
return corpus_status
|
||||
if isinstance(corpus_status, int):
|
||||
return CorpusStatus(corpus_status)
|
||||
if isinstance(corpus_status, str):
|
||||
return CorpusStatus[corpus_status]
|
||||
raise TypeError('corpus_status must be CorpusStatus, int, or str')
|
||||
|
||||
|
||||
class Corpus(HashidMixin, db.Model):
|
||||
'''
|
||||
Class to define a corpus.
|
||||
'''
|
||||
__tablename__ = 'corpora'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
|
||||
# Fields
|
||||
creation_date = db.Column(db.DateTime(), default=datetime.utcnow)
|
||||
description = db.Column(db.String(255))
|
||||
status = db.Column(
|
||||
IntEnumColumn(CorpusStatus),
|
||||
default=CorpusStatus.UNPREPARED
|
||||
)
|
||||
title = db.Column(db.String(32))
|
||||
num_analysis_sessions = db.Column(db.Integer, default=0)
|
||||
num_tokens = db.Column(db.Integer, default=0)
|
||||
is_public = db.Column(db.Boolean, default=False)
|
||||
# Relationships
|
||||
files = db.relationship(
|
||||
'CorpusFile',
|
||||
back_populates='corpus',
|
||||
lazy='dynamic',
|
||||
cascade='all, delete-orphan'
|
||||
)
|
||||
corpus_follower_associations = db.relationship(
|
||||
'CorpusFollowerAssociation',
|
||||
back_populates='corpus',
|
||||
cascade='all, delete-orphan'
|
||||
)
|
||||
followers = association_proxy(
|
||||
'corpus_follower_associations',
|
||||
'follower',
|
||||
creator=lambda u: CorpusFollowerAssociation(follower=u)
|
||||
)
|
||||
user = db.relationship('User', back_populates='corpora')
|
||||
# "static" attributes
|
||||
max_num_tokens = 2_147_483_647
|
||||
|
||||
def __repr__(self):
|
||||
return f'<Corpus {self.title}>'
|
||||
|
||||
@property
|
||||
def analysis_url(self):
|
||||
return url_for('corpora.analysis', corpus_id=self.id)
|
||||
|
||||
@property
|
||||
def jsonpatch_path(self):
|
||||
return f'{self.user.jsonpatch_path}/corpora/{self.hashid}'
|
||||
|
||||
@property
|
||||
def path(self) -> Path:
|
||||
return self.user.path / 'corpora' / f'{self.id}'
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return url_for('corpora.corpus', corpus_id=self.id)
|
||||
|
||||
@property
|
||||
def user_hashid(self):
|
||||
return self.user.hashid
|
||||
|
||||
@staticmethod
|
||||
def create(**kwargs):
|
||||
corpus = Corpus(**kwargs)
|
||||
db.session.add(corpus)
|
||||
db.session.flush(objects=[corpus])
|
||||
db.session.refresh(corpus)
|
||||
corpus_files_dir = corpus.path / 'files'
|
||||
corpus_cwb_dir = corpus.path / 'cwb'
|
||||
corpus_cwb_data_dir = corpus_cwb_dir / 'data'
|
||||
corpus_cwb_registry_dir = corpus_cwb_dir / 'registry'
|
||||
try:
|
||||
corpus.path.mkdir()
|
||||
corpus_files_dir.mkdir()
|
||||
corpus_cwb_dir.mkdir()
|
||||
corpus_cwb_data_dir.mkdir()
|
||||
corpus_cwb_registry_dir.mkdir()
|
||||
except OSError as e:
|
||||
# TODO: Potential leftover cleanup
|
||||
current_app.logger.error(e)
|
||||
db.session.rollback()
|
||||
raise
|
||||
return corpus
|
||||
|
||||
def build(self):
|
||||
corpus_cwb_dir = self.path / 'cwb'
|
||||
corpus_cwb_data_dir = corpus_cwb_dir / 'data'
|
||||
corpus_cwb_registry_dir = corpus_cwb_dir / 'registry'
|
||||
try:
|
||||
shutil.rmtree(corpus_cwb_dir, ignore_errors=True)
|
||||
corpus_cwb_dir.mkdir()
|
||||
corpus_cwb_data_dir.mkdir()
|
||||
corpus_cwb_registry_dir.mkdir()
|
||||
except OSError as e:
|
||||
current_app.logger.error(e)
|
||||
self.status = CorpusStatus.FAILED
|
||||
raise
|
||||
corpus_element = ET.fromstring('<corpus>\n</corpus>')
|
||||
for corpus_file in self.files:
|
||||
normalized_vrt_path = corpus_cwb_dir / f'{corpus_file.id}.norm.vrt'
|
||||
try:
|
||||
normalize_vrt_file(corpus_file.path, normalized_vrt_path)
|
||||
except:
|
||||
self.status = CorpusStatus.FAILED
|
||||
return
|
||||
element_tree = ET.parse(normalized_vrt_path)
|
||||
text_element = element_tree.getroot()
|
||||
text_element.set('author', corpus_file.author)
|
||||
text_element.set('title', corpus_file.title)
|
||||
text_element.set(
|
||||
'publishing_year',
|
||||
f'{corpus_file.publishing_year}'
|
||||
)
|
||||
text_element.set('address', corpus_file.address or 'NULL')
|
||||
text_element.set('booktitle', corpus_file.booktitle or 'NULL')
|
||||
text_element.set('chapter', corpus_file.chapter or 'NULL')
|
||||
text_element.set('editor', corpus_file.editor or 'NULL')
|
||||
text_element.set('institution', corpus_file.institution or 'NULL')
|
||||
text_element.set('journal', corpus_file.journal or 'NULL')
|
||||
text_element.set('pages', f'{corpus_file.pages}' or 'NULL')
|
||||
text_element.set('publisher', corpus_file.publisher or 'NULL')
|
||||
text_element.set('school', corpus_file.school or 'NULL')
|
||||
text_element.tail = '\n'
|
||||
# corpus_element.insert(1, text_element)
|
||||
corpus_element.append(text_element)
|
||||
ET.ElementTree(corpus_element).write(
|
||||
corpus_cwb_dir / 'corpus.vrt',
|
||||
encoding='utf-8'
|
||||
)
|
||||
self.status = CorpusStatus.SUBMITTED
|
||||
|
||||
def delete(self):
|
||||
shutil.rmtree(self.path, ignore_errors=True)
|
||||
db.session.delete(self)
|
||||
|
||||
def to_json_serializeable(self, backrefs=False, relationships=False):
    '''Return a JSON-ready dict view of this corpus.

    backrefs: additionally embed the owning user (with its own backrefs).
    relationships: additionally embed follower associations and files,
    keyed by their hashids.
    '''
    json_serializeable = {
        'id': self.hashid,
        'creation_date': f'{self.creation_date.isoformat()}Z',
        'description': self.description,
        'max_num_tokens': self.max_num_tokens,
        'num_analysis_sessions': self.num_analysis_sessions,
        'num_tokens': self.num_tokens,
        'status': self.status.name,
        'title': self.title,
        'is_public': self.is_public
    }
    if backrefs:
        json_serializeable['user'] = self.user.to_json_serializeable(backrefs=True)
    if relationships:
        json_serializeable['corpus_follower_associations'] = {
            cfa.hashid: cfa.to_json_serializeable()
            for cfa in self.corpus_follower_associations
        }
        json_serializeable['files'] = {
            corpus_file.hashid: corpus_file.to_json_serializeable(relationships=True)
            for corpus_file in self.files
        }
    return json_serializeable
|
@ -1,102 +0,0 @@
|
||||
from flask import current_app, url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from pathlib import Path
|
||||
from app import db
|
||||
from .corpus import CorpusStatus
|
||||
from .file_mixin import FileMixin
|
||||
|
||||
|
||||
class CorpusFile(FileMixin, HashidMixin, db.Model):
    '''A single VRT file belonging to a Corpus, with bibliographic metadata.'''
    __tablename__ = 'corpus_files'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Foreign keys
    corpus_id = db.Column(db.Integer, db.ForeignKey('corpora.id'))
    # Fields (bibliographic metadata, mostly optional)
    author = db.Column(db.String(255))
    description = db.Column(db.String(255))
    publishing_year = db.Column(db.Integer)
    title = db.Column(db.String(255))
    address = db.Column(db.String(255))
    booktitle = db.Column(db.String(255))
    chapter = db.Column(db.String(255))
    editor = db.Column(db.String(255))
    institution = db.Column(db.String(255))
    journal = db.Column(db.String(255))
    pages = db.Column(db.String(255))
    publisher = db.Column(db.String(255))
    school = db.Column(db.String(255))
    # Relationships
    corpus = db.relationship('Corpus', back_populates='files')

    @property
    def download_url(self):
        # Route that serves the raw file contents.
        return url_for(
            'corpora.download_corpus_file',
            corpus_id=self.corpus_id,
            corpus_file_id=self.id
        )

    @property
    def jsonpatch_path(self):
        # JSON Patch path of this file within the owning corpus' document.
        return f'{self.corpus.jsonpatch_path}/files/{self.hashid}'

    @property
    def path(self) -> Path:
        # Stored below the corpus' "files" directory, named by row id.
        return self.corpus.path / 'files' / f'{self.id}'

    @property
    def url(self):
        return url_for(
            'corpora.corpus_file',
            corpus_id=self.corpus_id,
            corpus_file_id=self.id
        )

    @property
    def user_hashid(self):
        return self.corpus.user.hashid

    @property
    def user_id(self):
        return self.corpus.user_id

    def delete(self):
        '''Remove the file from disk, delete the row, and mark the corpus
        as UNPREPARED so it gets rebuilt without this file.'''
        try:
            self.path.unlink(missing_ok=True)
        except OSError as e:
            current_app.logger.error(e)
            raise
        db.session.delete(self)
        self.corpus.status = CorpusStatus.UNPREPARED

    def to_json_serializeable(self, backrefs=False, relationships=False):
        '''Return a JSON-ready dict; optionally embed the owning corpus.'''
        json_serializeable = {
            'id': self.hashid,
            'address': self.address,
            'author': self.author,
            'description': self.description,
            'booktitle': self.booktitle,
            'chapter': self.chapter,
            'editor': self.editor,
            'institution': self.institution,
            'journal': self.journal,
            'pages': self.pages,
            'publisher': self.publisher,
            'publishing_year': self.publishing_year,
            'school': self.school,
            'title': self.title
        }
        # Merge in the shared file columns (creation_date, filename, mimetype).
        json_serializeable.update(
            self.file_mixin_to_json_serializeable(
                backrefs=backrefs,
                relationships=relationships
            )
        )
        if backrefs:
            json_serializeable['corpus'] = self.corpus.to_json_serializeable(backrefs=True)
        if relationships:
            pass
        return json_serializeable
|
@ -1,47 +0,0 @@
|
||||
from flask_hashids import HashidMixin
|
||||
from app import db
|
||||
from .corpus_follower_role import CorpusFollowerRole
|
||||
|
||||
|
||||
class CorpusFollowerAssociation(HashidMixin, db.Model):
    '''Association object linking a follower (User) to a Corpus with a role.'''
    __tablename__ = 'corpus_follower_associations'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Foreign keys
    corpus_id = db.Column(db.Integer, db.ForeignKey('corpora.id'))
    follower_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    role_id = db.Column(db.Integer, db.ForeignKey('corpus_follower_roles.id'))
    # Relationships
    corpus = db.relationship('Corpus', back_populates='corpus_follower_associations')
    follower = db.relationship('User', back_populates='corpus_follower_associations')
    role = db.relationship('CorpusFollowerRole', back_populates='corpus_follower_associations')

    def __init__(self, **kwargs):
        # Fall back to the default follower role when none was given explicitly.
        if 'role' not in kwargs:
            kwargs['role'] = CorpusFollowerRole.query.filter_by(default=True).first()
        super().__init__(**kwargs)

    def __repr__(self):
        return f'<CorpusFollowerAssociation {self.follower!r} ~ {self.role!r} ~ {self.corpus!r}>'

    def to_json_serializeable(self, backrefs=False, relationships=False):
        '''Return a JSON-ready dict; corpus, follower and role are always embedded.'''
        json_serializeable = {
            'id': self.hashid,
            'corpus': self.corpus.to_json_serializeable(backrefs=True),
            'follower': self.follower.to_json_serializeable(),
            'role': self.role.to_json_serializeable()
        }
        if backrefs:
            pass
        if relationships:
            pass
        return json_serializeable
|
@ -1,107 +0,0 @@
|
||||
from flask_hashids import HashidMixin
|
||||
from enum import IntEnum
|
||||
from typing import Union
|
||||
from app import db
|
||||
|
||||
|
||||
class CorpusFollowerPermission(IntEnum):
    '''Bit-flag permissions a corpus follower role may grant (powers of 2).'''
    VIEW = 1
    MANAGE_FILES = 2
    MANAGE_FOLLOWERS = 4
    MANAGE_CORPUS = 8

    @staticmethod
    def get(corpus_follower_permission: Union['CorpusFollowerPermission', int, str]) -> 'CorpusFollowerPermission':
        '''Coerce an enum member, numeric value, or member name into a member.

        Raises TypeError for any other input type.
        '''
        # The member check must come first: IntEnum members are also ints.
        if isinstance(corpus_follower_permission, CorpusFollowerPermission):
            return corpus_follower_permission
        if isinstance(corpus_follower_permission, str):
            # Lookup by member name, e.g. 'VIEW'.
            return CorpusFollowerPermission[corpus_follower_permission]
        if isinstance(corpus_follower_permission, int):
            # Lookup by numeric value, e.g. 1.
            return CorpusFollowerPermission(corpus_follower_permission)
        raise TypeError('corpus_follower_permission must be CorpusFollowerPermission, int, or str')
|
||||
|
||||
|
||||
class CorpusFollowerRole(HashidMixin, db.Model):
    '''A named bundle of CorpusFollowerPermission flags assignable to followers.'''
    __tablename__ = 'corpus_follower_roles'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Fields
    name = db.Column(db.String(64), unique=True)
    default = db.Column(db.Boolean, default=False, index=True)
    permissions = db.Column(db.Integer, default=0)  # bitmask of CorpusFollowerPermission
    # Relationships
    corpus_follower_associations = db.relationship(
        'CorpusFollowerAssociation',
        back_populates='role'
    )

    def __repr__(self):
        return f'<CorpusFollowerRole {self.name}>'

    def has_permission(self, permission: Union[CorpusFollowerPermission, int, str]):
        '''Return True if this role's bitmask includes the given permission.'''
        perm = CorpusFollowerPermission.get(permission)
        return self.permissions & perm.value == perm.value

    def add_permission(self, permission: Union[CorpusFollowerPermission, int, str]):
        '''Set the permission bit if not already set.'''
        perm = CorpusFollowerPermission.get(permission)
        if not self.has_permission(perm):
            self.permissions += perm.value

    def remove_permission(self, permission: Union[CorpusFollowerPermission, int, str]):
        '''Clear the permission bit if set.'''
        perm = CorpusFollowerPermission.get(permission)
        if self.has_permission(perm):
            self.permissions -= perm.value

    def reset_permissions(self):
        '''Clear all permission bits.'''
        self.permissions = 0

    def to_json_serializeable(self, backrefs=False, relationships=False):
        '''Return a JSON-ready dict; permissions are expanded to their names.'''
        json_serializeable = {
            'id': self.hashid,
            'default': self.default,
            'name': self.name,
            'permissions': [
                x.name
                for x in CorpusFollowerPermission
                if self.has_permission(x)
            ]
        }
        if backrefs:
            pass
        if relationships:
            # BUGFIX: the relationship is named `corpus_follower_associations`
            # (plural); the old code read a non-existent singular attribute and
            # raised AttributeError. Key renamed to match Corpus' serialization.
            json_serializeable['corpus_follower_associations'] = {
                x.hashid: x.to_json_serializeable(relationships=True)
                for x in self.corpus_follower_associations
            }
        return json_serializeable

    @staticmethod
    def insert_defaults():
        '''Create or update the built-in follower roles and commit them.'''
        roles = {
            'Anonymous': [],
            'Viewer': [
                CorpusFollowerPermission.VIEW
            ],
            'Contributor': [
                CorpusFollowerPermission.VIEW,
                CorpusFollowerPermission.MANAGE_FILES
            ],
            'Administrator': [
                CorpusFollowerPermission.VIEW,
                CorpusFollowerPermission.MANAGE_FILES,
                CorpusFollowerPermission.MANAGE_FOLLOWERS,
                CorpusFollowerPermission.MANAGE_CORPUS
            ]
        }
        default_role_name = 'Viewer'
        for role_name, permissions in roles.items():
            role = CorpusFollowerRole.query.filter_by(name=role_name).first()
            if role is None:
                role = CorpusFollowerRole(name=role_name)
            # Rebuild the bitmask from scratch so removed permissions disappear.
            role.reset_permissions()
            for permission in permissions:
                role.add_permission(permission)
            role.default = role.name == default_role_name
            db.session.add(role)
        db.session.commit()
|
@ -1,133 +0,0 @@
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from app import db, mail, socketio
|
||||
from app.email import create_message
|
||||
from .corpus_file import CorpusFile
|
||||
from .corpus_follower_association import CorpusFollowerAssociation
|
||||
from .corpus import Corpus
|
||||
from .job_input import JobInput
|
||||
from .job_result import JobResult
|
||||
from .job import Job, JobStatus
|
||||
from .spacy_nlp_pipeline_model import SpaCyNLPPipelineModel
|
||||
from .tesseract_ocr_pipeline_model import TesseractOCRPipelineModel
|
||||
from .user import UserSettingJobStatusMailNotificationLevel
|
||||
|
||||
|
||||
def register_event_listeners():
    '''Attach SQLAlchemy mapper event hooks that broadcast model changes.'''
    # Models that share the generic jsonpatch_path/user_hashid based handlers.
    for model in (
        Corpus,
        CorpusFile,
        Job,
        JobInput,
        JobResult,
        SpaCyNLPPipelineModel,
        TesseractOCRPipelineModel
    ):
        db.event.listen(model, 'after_delete', resource_after_delete)
        db.event.listen(model, 'after_insert', resource_after_insert)
        db.event.listen(model, 'after_update', resource_after_update)

    # Follower associations need dedicated handlers (custom patch paths).
    db.event.listen(CorpusFollowerAssociation, 'after_delete', cfa_after_delete)
    db.event.listen(CorpusFollowerAssociation, 'after_insert', cfa_after_insert)

    # Jobs additionally trigger status mail notifications.
    db.event.listen(Job, 'after_update', job_after_update)
|
||||
|
||||
|
||||
def resource_after_delete(mapper, connection, resource):
    '''Emit a JSON Patch "remove" op to the owner's Socket.IO room.'''
    jsonpatch = [{'op': 'remove', 'path': resource.jsonpatch_path}]
    socketio.emit('PATCH', jsonpatch, room=f'/users/{resource.user_hashid}')
|
||||
|
||||
|
||||
def cfa_after_delete(mapper, connection, cfa):
    '''Emit a JSON Patch "remove" op for a deleted follower association.'''
    # The association's patch path is built manually because it lives under
    # the corpus owner's document, not under the follower's.
    jsonpatch_path = f'/users/{cfa.corpus.user.hashid}/corpora/{cfa.corpus.hashid}/corpus_follower_associations/{cfa.hashid}'
    jsonpatch = [{'op': 'remove', 'path': jsonpatch_path}]
    socketio.emit('PATCH', jsonpatch, room=f'/users/{cfa.corpus.user.hashid}')
|
||||
|
||||
|
||||
def resource_after_insert(mapper, connection, resource):
    '''Emit a JSON Patch "add" op carrying the freshly inserted resource.'''
    jsonpatch_value = resource.to_json_serializeable()
    # Relationship collections start out empty on the client side; they are
    # filled by the inserts of the related rows themselves.
    for relationship in mapper.relationships:
        jsonpatch_value[relationship.key] = {}
    jsonpatch = [
        {
            'op': 'add',
            'path': resource.jsonpatch_path,
            'value': jsonpatch_value
        }
    ]
    socketio.emit('PATCH', jsonpatch, room=f'/users/{resource.user_hashid}')
|
||||
|
||||
|
||||
def cfa_after_insert(mapper, connection, cfa):
    '''Emit a JSON Patch "add" op for a new follower association.'''
    # Patch path is built manually: the association lives under the corpus
    # owner's document, not under the follower's.
    jsonpatch_path = f'/users/{cfa.corpus.user.hashid}/corpora/{cfa.corpus.hashid}/corpus_follower_associations/{cfa.hashid}'
    jsonpatch = [
        {
            'op': 'add',
            'path': jsonpatch_path,
            'value': cfa.to_json_serializeable()
        }
    ]
    socketio.emit('PATCH', jsonpatch, room=f'/users/{cfa.corpus.user.hashid}')
|
||||
|
||||
|
||||
def resource_after_update(mapper, connection, resource):
    '''Emit JSON Patch "replace" ops for every changed scalar attribute.'''
    jsonpatch = []
    for attr in db.inspect(resource).attrs:
        if attr.key in mapper.relationships:
            continue  # only scalar columns are patched here
        if not attr.load_history().has_changes():
            continue  # attribute unchanged in this flush
        # Normalize values that are not directly JSON serializable.
        value = attr.value
        if isinstance(value, datetime):
            value = f'{value.isoformat()}Z'
        elif isinstance(value, Enum):
            value = value.name
        jsonpatch.append(
            {
                'op': 'replace',
                'path': f'{resource.jsonpatch_path}/{attr.key}',
                'value': value
            }
        )
    if jsonpatch:
        socketio.emit('PATCH', jsonpatch, room=f'/users/{resource.user_hashid}')
|
||||
|
||||
|
||||
def job_after_update(mapper, connection, job):
    '''Send a status-change notification mail, honoring the user's preference.'''
    for attr in db.inspect(job).attrs:
        if attr.key != 'status':
            continue  # only status changes trigger mails
        if not attr.load_history().has_changes():
            return
        level = job.user.setting_job_status_mail_notification_level
        if level == UserSettingJobStatusMailNotificationLevel.NONE:
            return
        if level == UserSettingJobStatusMailNotificationLevel.END:
            # "END" means: notify only on terminal states.
            if job.status not in [JobStatus.COMPLETED, JobStatus.FAILED]:
                return
        msg = create_message(
            job.user.email,
            f'Status update for your Job "{job.title}"',
            'tasks/email/notification',
            job=job
        )
        mail.send(msg)
|
@ -1,40 +0,0 @@
|
||||
from datetime import datetime
|
||||
from flask import current_app
|
||||
from werkzeug.utils import secure_filename
|
||||
from app import db
|
||||
|
||||
|
||||
class FileMixin:
    '''
    Mixin for db.Model classes. All file related models should use this.
    Provides the shared file columns and a create() helper that keeps the
    database row and the on-disk file in sync.
    '''
    creation_date = db.Column(db.DateTime, default=datetime.utcnow)
    filename = db.Column(db.String(255))
    mimetype = db.Column(db.String(255))

    def file_mixin_to_json_serializeable(self, backrefs=False, relationships=False):
        '''JSON-ready view of the shared file columns.'''
        return {
            'creation_date': f'{self.creation_date.isoformat()}Z',
            'filename': self.filename,
            'mimetype': self.mimetype
        }

    @classmethod
    def create(cls, file_storage, **kwargs):
        '''Persist a new row for ``file_storage`` and write the file to disk.

        Rolls back the session and re-raises if the file cannot be saved.
        '''
        filename = kwargs.pop('filename', file_storage.filename)
        mimetype = kwargs.pop('mimetype', file_storage.mimetype)
        obj = cls(
            filename=secure_filename(filename),
            mimetype=mimetype,
            **kwargs
        )
        db.session.add(obj)
        # Flush + refresh so obj.id (and therefore obj.path) is available
        # before the commit.
        db.session.flush(objects=[obj])
        db.session.refresh(obj)
        try:
            file_storage.save(obj.path)
        except (AttributeError, OSError) as e:
            current_app.logger.error(e)
            db.session.rollback()
            raise e
        return obj
|
@ -1,172 +0,0 @@
|
||||
from datetime import datetime
|
||||
from enum import IntEnum
|
||||
from flask import current_app, url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
from app import db
|
||||
from app.ext.flask_sqlalchemy import ContainerColumn, IntEnumColumn
|
||||
|
||||
|
||||
class JobStatus(IntEnum):
    '''Lifecycle states of a Job, ordered by progression.'''
    INITIALIZING = 1
    SUBMITTED = 2
    QUEUED = 3
    RUNNING = 4
    CANCELING = 5
    CANCELED = 6
    COMPLETED = 7
    FAILED = 8

    @staticmethod
    def get(job_status: Union['JobStatus', int, str]) -> 'JobStatus':
        '''Coerce an enum member, numeric value, or member name into a member.

        Raises TypeError for any other input type.
        '''
        # The member check must come first: IntEnum members are also ints.
        if isinstance(job_status, JobStatus):
            return job_status
        if isinstance(job_status, str):
            # Lookup by member name, e.g. 'RUNNING'.
            return JobStatus[job_status]
        if isinstance(job_status, int):
            # Lookup by numeric value, e.g. 4.
            return JobStatus(job_status)
        raise TypeError('job_status must be JobStatus, int, or str')
|
||||
|
||||
|
||||
class Job(HashidMixin, db.Model):
    '''
    Class to define Jobs.

    A job is a single service run owned by a user; its working data
    (inputs, pipeline_data, results) lives in a per-job directory.
    '''
    __tablename__ = 'jobs'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Foreign keys
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    # Fields
    creation_date = \
        db.Column(db.DateTime(), default=datetime.utcnow)
    description = db.Column(db.String(255))
    end_date = db.Column(db.DateTime())
    service = db.Column(db.String(64))
    service_args = db.Column(ContainerColumn(dict, 255))
    service_version = db.Column(db.String(16))
    status = db.Column(
        IntEnumColumn(JobStatus),
        default=JobStatus.INITIALIZING
    )
    title = db.Column(db.String(32))
    # Relationships
    inputs = db.relationship(
        'JobInput',
        back_populates='job',
        cascade='all, delete-orphan',
        lazy='dynamic'
    )
    results = db.relationship(
        'JobResult',
        back_populates='job',
        cascade='all, delete-orphan',
        lazy='dynamic'
    )
    user = db.relationship(
        'User',
        back_populates='jobs'
    )

    def __repr__(self):
        return f'<Job {self.title}>'

    @property
    def jsonpatch_path(self):
        # JSON Patch path of this job within the owning user's document.
        return f'{self.user.jsonpatch_path}/jobs/{self.hashid}'

    @property
    def path(self) -> Path:
        # Per-job working directory below the owning user's directory.
        return self.user.path / 'jobs' / f'{self.id}'

    @property
    def url(self):
        return url_for('jobs.job', job_id=self.id)

    @property
    def user_hashid(self):
        return self.user.hashid

    @staticmethod
    def create(**kwargs):
        '''Create a Job row and its directory layout.

        Raises OSError (after removing any partially created directories and
        rolling back) if the filesystem layout cannot be created.
        '''
        job = Job(**kwargs)
        db.session.add(job)
        # Flush + refresh so job.id (and therefore job.path) is available.
        db.session.flush(objects=[job])
        db.session.refresh(job)
        job_inputs_dir = job.path / 'inputs'
        job_pipeline_data_dir = job.path / 'pipeline_data'
        job_results_dir = job.path / 'results'
        try:
            job.path.mkdir()
            job_inputs_dir.mkdir()
            job_pipeline_data_dir.mkdir()
            job_results_dir.mkdir()
        except OSError as e:
            current_app.logger.error(e)
            # Remove partially created directories so a retry starts from a
            # clean slate (resolves the old "leftover cleanup" TODO).
            shutil.rmtree(job.path, ignore_errors=True)
            db.session.rollback()
            raise
        return job

    def delete(self):
        ''' Delete the job and its inputs and results from the database. '''
        if self.status not in [JobStatus.COMPLETED, JobStatus.FAILED]:  # noqa
            # Ask the daemon to cancel first, then poll until it confirms.
            self.status = JobStatus.CANCELING
            db.session.commit()
            while self.status != JobStatus.CANCELED:
                # In case the daemon handled a job in any way
                if self.status != JobStatus.CANCELING:
                    self.status = JobStatus.CANCELING
                    db.session.commit()
                sleep(1)
                db.session.refresh(self)
        try:
            shutil.rmtree(self.path)
        except OSError as e:
            current_app.logger.error(e)
            db.session.rollback()
            raise e
        db.session.delete(self)

    def restart(self):
        ''' Restart a job - only if the status is failed.

        Raises ValueError if the job is not in the FAILED state.
        '''
        if self.status != JobStatus.FAILED:
            # ValueError instead of bare Exception; existing callers catching
            # Exception still work since ValueError is a subclass.
            raise ValueError('Job status is not "failed"')
        shutil.rmtree(self.path / 'results', ignore_errors=True)
        shutil.rmtree(self.path / 'pyflow.data', ignore_errors=True)
        for result in self.results:
            db.session.delete(result)
        self.end_date = None
        self.status = JobStatus.SUBMITTED

    def to_json_serializeable(self, backrefs=False, relationships=False):
        '''Return a JSON-ready dict; optionally embed the owning user
        (backrefs) or the inputs/results collections (relationships).'''
        json_serializeable = {
            'id': self.hashid,
            'creation_date': f'{self.creation_date.isoformat()}Z',
            'description': self.description,
            'end_date': (
                None if self.end_date is None
                else f'{self.end_date.isoformat()}Z'
            ),
            'service': self.service,
            'service_args': self.service_args,
            'service_version': self.service_version,
            'status': self.status.name,
            'title': self.title
        }
        if backrefs:
            json_serializeable['user'] = \
                self.user.to_json_serializeable(backrefs=True)
        if relationships:
            json_serializeable['inputs'] = {
                x.hashid: x.to_json_serializeable(relationships=True)
                for x in self.inputs
            }
            json_serializeable['results'] = {
                x.hashid: x.to_json_serializeable(relationships=True)
                for x in self.results
            }
        return json_serializeable
|
@ -1,65 +0,0 @@
|
||||
from flask import url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from pathlib import Path
|
||||
from app import db
|
||||
from .file_mixin import FileMixin
|
||||
|
||||
|
||||
class JobInput(FileMixin, HashidMixin, db.Model):
    '''An uploaded input file attached to a Job.'''
    __tablename__ = 'job_inputs'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Foreign keys
    job_id = db.Column(db.Integer, db.ForeignKey('jobs.id'))
    # Relationships
    job = db.relationship('Job', back_populates='inputs')

    def __repr__(self):
        return f'<JobInput {self.filename}>'

    @property
    def content_url(self):
        # Download route for the raw input file.
        return url_for(
            'jobs.download_job_input',
            job_id=self.job.id,
            job_input_id=self.id
        )

    @property
    def jsonpatch_path(self):
        return f'{self.job.jsonpatch_path}/inputs/{self.hashid}'

    @property
    def path(self) -> Path:
        # Stored below the owning job's inputs directory, named by row id.
        return self.job.path / 'inputs' / f'{self.id}'

    @property
    def url(self):
        # Job detail page, anchored at this input.
        return url_for(
            'jobs.job',
            job_id=self.job_id,
            _anchor=f'job-{self.job.hashid}-input-{self.hashid}'
        )

    @property
    def user_hashid(self):
        return self.job.user.hashid

    @property
    def user_id(self):
        return self.job.user.id

    def to_json_serializeable(self, backrefs=False, relationships=False):
        '''Return a JSON-ready dict; optionally embed the owning job.'''
        json_serializeable = {
            'id': self.hashid,
            **self.file_mixin_to_json_serializeable()
        }
        if backrefs:
            json_serializeable['job'] = self.job.to_json_serializeable(backrefs=True)
        if relationships:
            pass
        return json_serializeable
|
@ -1,71 +0,0 @@
|
||||
from flask import url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from pathlib import Path
|
||||
from app import db
|
||||
from .file_mixin import FileMixin
|
||||
|
||||
|
||||
class JobResult(FileMixin, HashidMixin, db.Model):
    '''An output file produced by a Job.'''
    __tablename__ = 'job_results'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Foreign keys
    job_id = db.Column(db.Integer, db.ForeignKey('jobs.id'))
    # Fields
    description = db.Column(db.String(255))
    # Relationships
    job = db.relationship('Job', back_populates='results')

    def __repr__(self):
        return f'<JobResult {self.filename}>'

    @property
    def download_url(self):
        # Download route for the raw result file.
        return url_for(
            'jobs.download_job_result',
            job_id=self.job_id,
            job_result_id=self.id
        )

    @property
    def jsonpatch_path(self):
        return f'{self.job.jsonpatch_path}/results/{self.hashid}'

    @property
    def path(self) -> Path:
        # Stored below the owning job's results directory, named by row id.
        return self.job.path / 'results' / f'{self.id}'

    @property
    def url(self):
        # Job detail page, anchored at this result.
        return url_for(
            'jobs.job',
            job_id=self.job_id,
            _anchor=f'job-{self.job.hashid}-result-{self.hashid}'
        )

    @property
    def user_hashid(self):
        return self.job.user.hashid

    @property
    def user_id(self):
        return self.job.user.id

    def to_json_serializeable(self, backrefs=False, relationships=False):
        '''Return a JSON-ready dict; optionally embed the owning job.'''
        json_serializeable = {
            'id': self.hashid,
            'description': self.description
        }
        # Merge in the shared file columns (creation_date, filename, mimetype).
        json_serializeable.update(
            self.file_mixin_to_json_serializeable(
                backrefs=backrefs,
                relationships=relationships
            )
        )
        if backrefs:
            json_serializeable['job'] = self.job.to_json_serializeable(backrefs=True)
        if relationships:
            pass
        return json_serializeable
|
@ -1,100 +0,0 @@
|
||||
from enum import IntEnum
|
||||
from flask_hashids import HashidMixin
|
||||
from typing import Union
|
||||
from app import db
|
||||
|
||||
|
||||
class Permission(IntEnum):
    '''
    Defines User permissions as integers by the power of 2. User permission
    can be evaluated using the bitwise operator &.
    '''
    ADMINISTRATE = 1
    CONTRIBUTE = 2
    USE_API = 4

    @staticmethod
    def get(permission: Union['Permission', int, str]) -> 'Permission':
        '''Coerce an enum member, numeric value, or member name into a member.

        Raises TypeError for any other input type.
        '''
        # The member check must come first: IntEnum members are also ints.
        if isinstance(permission, Permission):
            return permission
        if isinstance(permission, str):
            # Lookup by member name, e.g. 'USE_API'.
            return Permission[permission]
        if isinstance(permission, int):
            # Lookup by numeric value, e.g. 4.
            return Permission(permission)
        raise TypeError('permission must be Permission, int, or str')
|
||||
|
||||
|
||||
class Role(HashidMixin, db.Model):
    '''A named bundle of Permission flags assigned to users.'''
    __tablename__ = 'roles'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Fields
    name = db.Column(db.String(64), unique=True)
    default = db.Column(db.Boolean, default=False, index=True)
    permissions = db.Column(db.Integer, default=0)  # bitmask of Permission
    # Relationships
    users = db.relationship('User', back_populates='role', lazy='dynamic')

    def __repr__(self):
        return f'<Role {self.name}>'

    def has_permission(self, permission: Union[Permission, int, str]):
        '''Return True if this role's bitmask includes the given permission.'''
        perm = Permission.get(permission)
        return self.permissions & perm.value == perm.value

    def add_permission(self, permission: Union[Permission, int, str]):
        '''Set the permission bit if not already set.'''
        perm = Permission.get(permission)
        if not self.has_permission(perm):
            self.permissions += perm.value

    def remove_permission(self, permission: Union[Permission, int, str]):
        '''Clear the permission bit if set.'''
        perm = Permission.get(permission)
        if self.has_permission(perm):
            self.permissions -= perm.value

    def reset_permissions(self):
        '''Clear all permission bits.'''
        self.permissions = 0

    def to_json_serializeable(self, backrefs=False, relationships=False):
        '''Return a JSON-ready dict; permissions are expanded to their names.'''
        json_serializeable = {
            'id': self.hashid,
            'default': self.default,
            'name': self.name,
            'permissions': [
                perm.name
                for perm in Permission
                if self.has_permission(perm.value)
            ]
        }
        if backrefs:
            pass
        if relationships:
            json_serializeable['users'] = {
                user.hashid: user.to_json_serializeable(relationships=True)
                for user in self.users
            }
        return json_serializeable

    @staticmethod
    def insert_defaults():
        '''Create or update the built-in roles and commit them.'''
        roles = {
            'User': [],
            'API user': [Permission.USE_API],
            'Contributor': [Permission.CONTRIBUTE],
            'Administrator': [
                Permission.ADMINISTRATE,
                Permission.CONTRIBUTE,
                Permission.USE_API
            ],
            'System user': []
        }
        default_role_name = 'User'
        for role_name, permissions in roles.items():
            role = Role.query.filter_by(name=role_name).first()
            if role is None:
                role = Role(name=role_name)
            # Rebuild the bitmask from scratch so removed permissions disappear.
            role.reset_permissions()
            for permission in permissions:
                role.add_permission(permission)
            role.default = role.name == default_role_name
            db.session.add(role)
        db.session.commit()
|
@ -1,136 +0,0 @@
|
||||
from flask import current_app, url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from tqdm import tqdm
|
||||
from pathlib import Path
|
||||
import requests
|
||||
import yaml
|
||||
from app import db
|
||||
from app.ext.flask_sqlalchemy import ContainerColumn
|
||||
from .file_mixin import FileMixin
|
||||
from .user import User
|
||||
|
||||
|
||||
class SpaCyNLPPipelineModel(FileMixin, HashidMixin, db.Model):
|
||||
__tablename__ = 'spacy_nlp_pipeline_models'
|
||||
# Primary key
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
# Foreign keys
|
||||
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
|
||||
# Fields
|
||||
title = db.Column(db.String(64))
|
||||
description = db.Column(db.String(255))
|
||||
version = db.Column(db.String(16))
|
||||
compatible_service_versions = db.Column(ContainerColumn(list, 255))
|
||||
publisher = db.Column(db.String(128))
|
||||
publisher_url = db.Column(db.String(512))
|
||||
publishing_url = db.Column(db.String(512))
|
||||
publishing_year = db.Column(db.Integer)
|
||||
pipeline_name = db.Column(db.String(64))
|
||||
is_public = db.Column(db.Boolean, default=False)
|
||||
# Relationships
|
||||
user = db.relationship('User', back_populates='spacy_nlp_pipeline_models')
|
||||
|
||||
@property
|
||||
def path(self) -> Path:
|
||||
return self.user.path / 'spacy_nlp_pipeline_models' / f'{self.id}'
|
||||
|
||||
@property
|
||||
def jsonpatch_path(self):
|
||||
return f'{self.user.jsonpatch_path}/spacy_nlp_pipeline_models/{self.hashid}'
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return url_for(
|
||||
'contributions.spacy_nlp_pipeline_model',
|
||||
spacy_nlp_pipeline_model_id=self.id
|
||||
)
|
||||
|
||||
@property
|
||||
def user_hashid(self):
|
||||
return self.user.hashid
|
||||
|
||||
@staticmethod
|
||||
def insert_defaults(force_download=False):
|
||||
nopaque_user = User.query.filter_by(username='nopaque').first()
|
||||
default_records_file = Path(__file__).parent / 'default_records' / 'spacy_nlp_pipeline_model.yml'
|
||||
with default_records_file.open('r') as f:
|
||||
default_records = yaml.safe_load(f)
|
||||
for m in default_records:
|
||||
model = SpaCyNLPPipelineModel.query.filter_by(title=m['title'], version=m['version']).first() # noqa
|
||||
if model is not None:
|
||||
model.compatible_service_versions = m['compatible_service_versions']
|
||||
model.description = m['description']
|
||||
model.filename = m['url'].split('/')[-1]
|
||||
model.publisher = m['publisher']
|
||||
model.publisher_url = m['publisher_url']
|
||||
model.publishing_url = m['publishing_url']
|
||||
model.publishing_year = m['publishing_year']
|
||||
model.is_public = True
|
||||
model.title = m['title']
|
||||
model.version = m['version']
|
||||
model.pipeline_name = m['pipeline_name']
|
||||
else:
|
||||
model = SpaCyNLPPipelineModel(
|
||||
compatible_service_versions=m['compatible_service_versions'],
|
||||
description=m['description'],
|
||||
filename=m['url'].split('/')[-1],
|
||||
publisher=m['publisher'],
|
||||
publisher_url=m['publisher_url'],
|
||||
publishing_url=m['publishing_url'],
|
||||
publishing_year=m['publishing_year'],
|
||||
is_public=True,
|
||||
title=m['title'],
|
||||
user=nopaque_user,
|
||||
version=m['version'],
|
||||
pipeline_name=m['pipeline_name']
|
||||
)
|
||||
db.session.add(model)
|
||||
db.session.flush(objects=[model])
|
||||
db.session.refresh(model)
|
||||
if not model.path.exists() or force_download:
|
||||
r = requests.get(m['url'], stream=True)
|
||||
pbar = tqdm(
|
||||
desc=f'{model.title} ({model.filename})',
|
||||
unit="B",
|
||||
unit_scale=True,
|
||||
unit_divisor=1024,
|
||||
total=int(r.headers['Content-Length'])
|
||||
)
|
||||
pbar.clear()
|
||||
with open(model.path, 'wb') as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
pbar.update(len(chunk))
|
||||
f.write(chunk)
|
||||
pbar.close()
|
||||
db.session.commit()
|
||||
|
||||
def delete(self):
    """Remove the model file from disk, then mark the DB row for deletion.

    The database row is only deleted if the file removal succeeds; an
    OSError is logged and re-raised to the caller.
    """
    try:
        # missing_ok: a model whose file was never downloaded can still
        # be deleted without error.
        self.path.unlink(missing_ok=True)
    except OSError as err:
        current_app.logger.error(err)
        raise
    db.session.delete(self)
|
||||
|
||||
def to_json_serializeable(self, backrefs=False, relationships=False):
    """Serialize this model into a JSON-compatible dict.

    backrefs: also embed the owning user (serialized with backrefs).
    relationships: accepted for interface symmetry; this model has no
    serialized relationships.
    """
    data = {
        'id': self.hashid,
        'compatible_service_versions': self.compatible_service_versions,
        'description': self.description,
        'publisher': self.publisher,
        'publisher_url': self.publisher_url,
        'publishing_url': self.publishing_url,
        'publishing_year': self.publishing_year,
        'pipeline_name': self.pipeline_name,
        'is_public': self.is_public,
        'title': self.title,
        'version': self.version,
    }
    # File-mixin fields are merged last, so they win on key collisions,
    # matching the original `**` placement at the end of the literal.
    data.update(self.file_mixin_to_json_serializeable())
    if backrefs:
        data['user'] = self.user.to_json_serializeable(backrefs=True)
    if relationships:
        pass
    return data
|
@ -1,132 +0,0 @@
|
||||
from flask import current_app, url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from tqdm import tqdm
|
||||
from pathlib import Path
|
||||
import requests
|
||||
import yaml
|
||||
from app import db
|
||||
from app.ext.flask_sqlalchemy import ContainerColumn
|
||||
from .file_mixin import FileMixin
|
||||
from .user import User
|
||||
|
||||
|
||||
class TesseractOCRPipelineModel(FileMixin, HashidMixin, db.Model):
    """A Tesseract OCR traineddata model file owned by a user.

    Default (public) models are seeded from a YAML record file and
    downloaded on demand by ``insert_defaults``.
    """
    __tablename__ = 'tesseract_ocr_pipeline_models'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Foreign keys
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    # Fields
    title = db.Column(db.String(64))
    description = db.Column(db.String(255))
    version = db.Column(db.String(16))
    # List of service versions this model works with (stored serialized).
    compatible_service_versions = db.Column(ContainerColumn(list, 255))
    publisher = db.Column(db.String(128))
    publisher_url = db.Column(db.String(512))
    publishing_url = db.Column(db.String(512))
    publishing_year = db.Column(db.Integer)
    is_public = db.Column(db.Boolean, default=False)
    # Relationships
    user = db.relationship('User', back_populates='tesseract_ocr_pipeline_models')

    @property
    def path(self) -> Path:
        # On-disk location of the model file, under the owner's data dir.
        return self.user.path / 'tesseract_ocr_pipeline_models' / f'{self.id}'

    @property
    def jsonpatch_path(self):
        # JSON-Patch pointer used for SocketIO/user-data patch events.
        return f'{self.user.jsonpatch_path}/tesseract_ocr_pipeline_models/{self.hashid}'

    @property
    def url(self):
        return url_for(
            'contributions.tesseract_ocr_pipeline_model',
            tesseract_ocr_pipeline_model_id=self.id
        )

    @property
    def user_hashid(self):
        return self.user.hashid

    @staticmethod
    def insert_defaults(force_download=False):
        """Create/refresh the public default models from the YAML records.

        Downloads each model file that is missing on disk (or all of them
        when ``force_download`` is true) and commits after each record.
        """
        nopaque_user = User.query.filter_by(username='nopaque').first()
        default_records_file = Path(__file__).parent / 'default_records' / 'tesseract_ocr_pipeline_model.yml'
        with default_records_file.open('r') as f:
            default_records = yaml.safe_load(f)
        for m in default_records:
            # (title, version) identifies a record; update in place if present.
            model = TesseractOCRPipelineModel.query.filter_by(title=m['title'], version=m['version']).first()  # noqa
            if model is not None:
                model.compatible_service_versions = m['compatible_service_versions']
                model.description = m['description']
                model.filename = f'{model.id}.traineddata'
                model.publisher = m['publisher']
                model.publisher_url = m['publisher_url']
                model.publishing_url = m['publishing_url']
                model.publishing_year = m['publishing_year']
                model.is_public = True
                model.title = m['title']
                model.version = m['version']
            else:
                model = TesseractOCRPipelineModel(
                    compatible_service_versions=m['compatible_service_versions'],
                    description=m['description'],
                    publisher=m['publisher'],
                    publisher_url=m['publisher_url'],
                    publishing_url=m['publishing_url'],
                    publishing_year=m['publishing_year'],
                    is_public=True,
                    title=m['title'],
                    user=nopaque_user,
                    version=m['version']
                )
                db.session.add(model)
                # flush + refresh so model.id is assigned before it is used
                # to derive the filename below.
                db.session.flush(objects=[model])
                db.session.refresh(model)
                model.filename = f'{model.id}.traineddata'
            if not model.path.exists() or force_download:
                # NOTE(review): no timeout and no status check on this
                # request; a missing 'Content-Length' header would raise
                # KeyError — confirm the release URLs always provide it.
                r = requests.get(m['url'], stream=True)
                pbar = tqdm(
                    desc=f'{model.title} ({model.filename})',
                    unit="B",
                    unit_scale=True,
                    unit_divisor=1024,
                    total=int(r.headers['Content-Length'])
                )
                pbar.clear()
                with open(model.path, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=1024):
                        if chunk:  # filter out keep-alive new chunks
                            pbar.update(len(chunk))
                            f.write(chunk)
                pbar.close()
            db.session.commit()

    def delete(self):
        """Remove the model file from disk, then delete the DB row."""
        try:
            self.path.unlink(missing_ok=True)
        except OSError as e:
            # File removal failed: log, and propagate without deleting the row.
            current_app.logger.error(e)
            raise
        db.session.delete(self)

    def to_json_serializeable(self, backrefs=False, relationships=False):
        """Serialize this model into a JSON-compatible dict.

        backrefs: also embed the owning user. relationships: accepted for
        interface symmetry; no relationships are serialized here.
        """
        json_serializeable = {
            'id': self.hashid,
            'compatible_service_versions': self.compatible_service_versions,
            'description': self.description,
            'publisher': self.publisher,
            'publisher_url': self.publisher_url,
            'publishing_url': self.publishing_url,
            'publishing_year': self.publishing_year,
            'is_public': self.is_public,
            'title': self.title,
            'version': self.version,
            **self.file_mixin_to_json_serializeable()
        }
        if backrefs:
            json_serializeable['user'] = \
                self.user.to_json_serializeable(backrefs=True)
        if relationships:
            pass
        return json_serializeable
|
@ -1,48 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
from app import db
|
||||
|
||||
|
||||
class Token(db.Model):
    """An access/refresh token pair used for API authentication."""
    __tablename__ = 'tokens'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Foreign keys
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    # Fields
    access_token = db.Column(db.String(64), index=True)
    access_expiration = db.Column(db.DateTime)
    refresh_token = db.Column(db.String(64), index=True)
    refresh_expiration = db.Column(db.DateTime)
    # Relationships
    user = db.relationship('User', back_populates='tokens')

    def expire(self):
        # Invalidate both tokens immediately by moving their expiry to "now".
        self.access_expiration = datetime.utcnow()
        self.refresh_expiration = datetime.utcnow()

    def to_json_serializeable(self, backrefs=False, relationships=False):
        """Serialize this token pair into a JSON-compatible dict.

        NOTE(review): this reads ``self.hashid``, but Token inherits only
        ``db.Model`` (no HashidMixin in sight here) — confirm where
        ``hashid`` comes from, otherwise this raises AttributeError.
        """
        json_serializeable = {
            'id': self.hashid,
            'access_token': self.access_token,
            'access_expiration': (
                None if self.access_expiration is None
                else f'{self.access_expiration.isoformat()}Z'
            ),
            'refresh_token': self.refresh_token,
            'refresh_expiration': (
                None if self.refresh_expiration is None
                else f'{self.refresh_expiration.isoformat()}Z'
            )
        }
        if backrefs:
            json_serializeable['user'] = \
                self.user.to_json_serializeable(backrefs=True)
        if relationships:
            pass
        return json_serializeable

    @staticmethod
    def clean():
        """Remove any tokens that have been expired for more than a day."""
        yesterday = datetime.utcnow() - timedelta(days=1)
        Token.query.filter(Token.refresh_expiration < yesterday).delete()
|
@ -1,452 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
from enum import IntEnum
|
||||
from flask import current_app, url_for
|
||||
from flask_hashids import HashidMixin
|
||||
from flask_login import UserMixin
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from pathlib import Path
|
||||
from typing import Union
|
||||
from werkzeug.security import generate_password_hash, check_password_hash
|
||||
import jwt
|
||||
import re
|
||||
import secrets
|
||||
import shutil
|
||||
from app import db, hashids
|
||||
from app.ext.flask_sqlalchemy import IntEnumColumn
|
||||
from .corpus import Corpus
|
||||
from .corpus_follower_association import CorpusFollowerAssociation
|
||||
from .corpus_follower_role import CorpusFollowerRole
|
||||
from .role import Permission, Role
|
||||
from .token import Token
|
||||
|
||||
|
||||
class ProfilePrivacySettings(IntEnum):
    """Bit flags controlling which profile fields are publicly visible."""
    SHOW_EMAIL = 1
    SHOW_LAST_SEEN = 2
    SHOW_MEMBER_SINCE = 4

    @staticmethod
    def get(profile_privacy_setting: Union['ProfilePrivacySettings', int, str]) -> 'ProfilePrivacySettings':
        """Coerce a member, raw flag value, or member name into a member."""
        value = profile_privacy_setting
        # Check for an existing member FIRST: IntEnum members are also
        # ints, so the order of these isinstance checks matters.
        if isinstance(value, ProfilePrivacySettings):
            return value
        if isinstance(value, int):
            # Lookup by flag value; invalid values raise ValueError.
            return ProfilePrivacySettings(value)
        if isinstance(value, str):
            # Lookup by member name; unknown names raise KeyError.
            return ProfilePrivacySettings[value]
        raise TypeError('profile_privacy_setting must be ProfilePrivacySettings, int, or str')
|
||||
|
||||
|
||||
class UserSettingJobStatusMailNotificationLevel(IntEnum):
    # Per-user e-mail verbosity for job status changes.
    # NOTE(review): semantics inferred from member names (NONE = never,
    # END = only terminal states, ALL = every transition) — confirm
    # against the mail dispatch code.
    NONE = 1
    END = 2
    ALL = 3
|
||||
|
||||
|
||||
class User(HashidMixin, UserMixin, db.Model):
    """A nopaque account: credentials, profile, settings and owned resources."""
    __tablename__ = 'users'
    # Primary key
    id = db.Column(db.Integer, primary_key=True)
    # Foreign keys
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
    # Fields
    email = db.Column(db.String(254), index=True, unique=True)
    username = db.Column(db.String(64), index=True, unique=True)
    # Allowed username characters (includes German umlauts and ß).
    username_pattern = re.compile(r'^[A-Za-zÄÖÜäöüß0-9_.]*$')
    password_hash = db.Column(db.String(128))
    confirmed = db.Column(db.Boolean, default=False)
    terms_of_use_accepted = db.Column(db.Boolean, default=False)
    member_since = db.Column(db.DateTime(), default=datetime.utcnow)
    setting_job_status_mail_notification_level = db.Column(
        IntEnumColumn(UserSettingJobStatusMailNotificationLevel),
        default=UserSettingJobStatusMailNotificationLevel.END
    )
    last_seen = db.Column(db.DateTime())
    full_name = db.Column(db.String(64))
    about_me = db.Column(db.String(256))
    location = db.Column(db.String(64))
    website = db.Column(db.String(128))
    organization = db.Column(db.String(128))
    is_public = db.Column(db.Boolean, default=False)
    # Bitmask of ProfilePrivacySettings flags (0 = show nothing extra).
    profile_privacy_settings = db.Column(db.Integer(), default=0)
    # Relationships
    avatar = db.relationship(
        'Avatar',
        back_populates='user',
        cascade='all, delete-orphan',
        uselist=False
    )
    corpora = db.relationship(
        'Corpus',
        back_populates='user',
        cascade='all, delete-orphan',
        lazy='dynamic'
    )
    corpus_follower_associations = db.relationship(
        'CorpusFollowerAssociation',
        back_populates='follower',
        cascade='all, delete-orphan'
    )
    # Proxy so `user.followed_corpora` reads/writes through the
    # CorpusFollowerAssociation rows.
    followed_corpora = association_proxy(
        'corpus_follower_associations',
        'corpus',
        creator=lambda c: CorpusFollowerAssociation(corpus=c)
    )
    jobs = db.relationship(
        'Job',
        back_populates='user',
        cascade='all, delete-orphan',
        lazy='dynamic'
    )
    role = db.relationship(
        'Role',
        back_populates='users'
    )
    spacy_nlp_pipeline_models = db.relationship(
        'SpaCyNLPPipelineModel',
        back_populates='user',
        cascade='all, delete-orphan',
        lazy='dynamic'
    )
    tesseract_ocr_pipeline_models = db.relationship(
        'TesseractOCRPipelineModel',
        back_populates='user',
        cascade='all, delete-orphan',
        lazy='dynamic'
    )
    tokens = db.relationship(
        'Token',
        back_populates='user',
        cascade='all, delete-orphan',
        lazy='dynamic'
    )

    def __init__(self, **kwargs):
        # Default the role: admin role for the configured admin e-mail,
        # otherwise the default role. NOTE(review): when 'role' is omitted,
        # 'email' must be present in kwargs or this raises KeyError.
        if 'role' not in kwargs:
            kwargs['role'] = (
                Role.query.filter_by(name='Administrator').first()
                if kwargs['email'] == current_app.config['NOPAQUE_ADMIN']
                else Role.query.filter_by(default=True).first()
            )
        super().__init__(**kwargs)

    def __repr__(self):
        return f'<User {self.username}>'

    @property
    def jsonpatch_path(self):
        # Root JSON-Patch pointer for this user's data events.
        return f'/users/{self.hashid}'

    @property
    def password(self):
        # Write-only attribute; only the hash is stored.
        raise AttributeError('password is not a readable attribute')

    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)

    @property
    def path(self) -> Path:
        # Root directory of this user's files on disk.
        return current_app.config.get('NOPAQUE_DATA_DIR') / 'users' / f'{self.id}'

    @staticmethod
    def create(**kwargs):
        """Create a user row and its on-disk directory skeleton.

        Rolls back the session and re-raises if any directory cannot be
        created. Does not commit.
        """
        user = User(**kwargs)
        db.session.add(user)
        # flush + refresh so user.id exists before building user.path.
        db.session.flush(objects=[user])
        db.session.refresh(user)
        user_spacy_nlp_pipeline_models_dir = user.path / 'spacy_nlp_pipeline_models'
        user_tesseract_ocr_pipeline_models_dir = user.path / 'tesseract_ocr_pipeline_models'
        user_corpora_dir = user.path / 'corpora'
        user_jobs_dir = user.path / 'jobs'
        try:
            user.path.mkdir()
            user_spacy_nlp_pipeline_models_dir.mkdir()
            user_tesseract_ocr_pipeline_models_dir.mkdir()
            user_corpora_dir.mkdir()
            user_jobs_dir.mkdir()
        except OSError as e:
            # TODO: Potential leftover cleanup
            current_app.logger.error(e)
            db.session.rollback()
            raise
        return user

    @staticmethod
    def insert_defaults():
        """Ensure the internal 'nopaque' system user exists with the right role."""
        nopaque_user = User.query.filter_by(username='nopaque').first()
        system_user_role = Role.query.filter_by(name='System user').first()
        if nopaque_user is None:
            nopaque_user = User.create(
                username='nopaque',
                role=system_user_role
            )
            db.session.add(nopaque_user)
        elif nopaque_user.role != system_user_role:
            nopaque_user.role = system_user_role
        db.session.commit()

    @staticmethod
    def reset_password(token, new_password):
        """Set a new password from a signed reset token; returns success."""
        try:
            payload = jwt.decode(
                token,
                current_app.config['SECRET_KEY'],
                algorithms=['HS256'],
                issuer=current_app.config['SERVER_NAME'],
                options={'require': ['exp', 'iat', 'iss', 'purpose', 'sub']}
            )
        except jwt.PyJWTError:
            return False
        if payload.get('purpose') != 'User.reset_password':
            return False
        # 'sub' carries the user's hashid; decode back to the integer id.
        user_hashid = payload.get('sub')
        user_id = hashids.decode(user_hashid)
        user = User.query.get(user_id)
        if user is None:
            return False
        user.password = new_password
        db.session.add(user)
        return True

    @staticmethod
    def verify_access_token(access_token, refresh_token=None):
        """Return the owner of a valid, unexpired access token, else None.

        System users are never returned.
        NOTE(review): the refresh_token parameter is accepted but unused.
        """
        token = Token.query.filter(Token.access_token == access_token).first()
        if token is not None:
            if token.access_expiration > datetime.utcnow():
                token.user.ping()
                db.session.commit()
                if token.user.role.name != 'System user':
                    return token.user

    @staticmethod
    def verify_refresh_token(refresh_token, access_token):
        """Return the matching Token if its refresh side is still valid.

        An expired refresh attempt is treated as suspicious: all of the
        user's tokens are revoked. Returns None in that case.
        """
        token = Token.query.filter((Token.refresh_token == refresh_token) & (Token.access_token == access_token)).first()
        if token is not None:
            if token.refresh_expiration > datetime.utcnow():
                return token
            # someone tried to refresh with an expired token
            # revoke all tokens from this user as a precaution
            token.user.revoke_auth_tokens()
            db.session.commit()

    def can(self, permission):
        # Permission check delegated to the user's role.
        return self.role is not None and self.role.has_permission(permission)

    def confirm(self, confirmation_token):
        """Mark the account confirmed if the signed token is valid for self."""
        try:
            payload = jwt.decode(
                confirmation_token,
                current_app.config['SECRET_KEY'],
                algorithms=['HS256'],
                issuer=current_app.config['SERVER_NAME'],
                options={'require': ['exp', 'iat', 'iss', 'purpose', 'sub']}
            )
        except jwt.PyJWTError:
            return False
        if payload.get('purpose') != 'user.confirm':
            return False
        if payload.get('sub') != self.hashid:
            return False
        self.confirmed = True
        db.session.add(self)
        return True

    def delete(self):
        """Remove the user's directory tree and delete the DB row."""
        shutil.rmtree(self.path, ignore_errors=True)
        db.session.delete(self)

    def generate_auth_token(self):
        # New token pair: 15-minute access token, 7-day refresh token.
        return Token(
            access_token=secrets.token_urlsafe(),
            access_expiration=datetime.utcnow() + timedelta(minutes=15),
            refresh_token=secrets.token_urlsafe(),
            refresh_expiration=datetime.utcnow() + timedelta(days=7),
            user=self
        )

    def generate_confirm_token(self, expiration=3600):
        """Signed account-confirmation JWT, valid for `expiration` seconds."""
        now = datetime.utcnow()
        payload = {
            'exp': now + timedelta(seconds=expiration),
            'iat': now,
            'iss': current_app.config['SERVER_NAME'],
            'purpose': 'user.confirm',
            'sub': self.hashid
        }
        return jwt.encode(
            payload,
            current_app.config['SECRET_KEY'],
            algorithm='HS256'
        )

    def generate_reset_password_token(self, expiration=3600):
        """Signed password-reset JWT, valid for `expiration` seconds."""
        now = datetime.utcnow()
        payload = {
            'exp': now + timedelta(seconds=expiration),
            'iat': now,
            'iss': current_app.config['SERVER_NAME'],
            'purpose': 'User.reset_password',
            'sub': self.hashid
        }
        return jwt.encode(
            payload,
            current_app.config['SECRET_KEY'],
            algorithm='HS256'
        )

    def is_administrator(self):
        return self.can(Permission.ADMINISTRATE)

    def ping(self):
        # Record activity; callers are responsible for committing.
        self.last_seen = datetime.utcnow()

    def revoke_auth_tokens(self):
        # Delete every token pair belonging to this user.
        for token in self.tokens:
            db.session.delete(token)

    def verify_password(self, password):
        # System users can never log in with a password.
        if self.role.name == 'System user':
            return False
        return check_password_hash(self.password_hash, password)

    #region Profile Privacy settings
    def has_profile_privacy_setting(self, setting):
        # True if the given flag bit is set in the bitmask.
        s = ProfilePrivacySettings.get(setting)
        return self.profile_privacy_settings & s.value == s.value

    def add_profile_privacy_setting(self, setting):
        # Set the flag bit (guard prevents double-adding the value).
        s = ProfilePrivacySettings.get(setting)
        if not self.has_profile_privacy_setting(s):
            self.profile_privacy_settings += s.value

    def remove_profile_privacy_setting(self, setting):
        # Clear the flag bit (guard prevents underflow).
        s = ProfilePrivacySettings.get(setting)
        if self.has_profile_privacy_setting(s):
            self.profile_privacy_settings -= s.value

    def reset_profile_privacy_settings(self):
        self.profile_privacy_settings = 0
    #endregion Profile Privacy settings

    def follow_corpus(self, corpus, role=None):
        """Follow a corpus with the given role (default role if None).

        If already following, only updates the role when it differs.
        """
        if role is None:
            cfr = CorpusFollowerRole.query.filter_by(default=True).first()
        else:
            cfr = role
        if self.is_following_corpus(corpus):
            cfa = CorpusFollowerAssociation.query.filter_by(corpus=corpus, follower=self).first()
            if cfa.role != cfr:
                cfa.role = cfr
        else:
            cfa = CorpusFollowerAssociation(corpus=corpus, role=cfr, follower=self)
            db.session.add(cfa)

    def unfollow_corpus(self, corpus):
        # Removing via the association proxy deletes the association row.
        if not self.is_following_corpus(corpus):
            return
        self.followed_corpora.remove(corpus)

    def is_following_corpus(self, corpus):
        return corpus in self.followed_corpora

    def generate_follow_corpus_token(self, corpus_hashid, role_name, expiration=7):
        """Signed invitation JWT that grants `role_name` on a corpus.

        FIXME(review): 'exp' is set to the raw integer `expiration`
        (default 7), i.e. 7 seconds after the Unix epoch — such a token is
        always expired. Presumably this was meant to be
        `now + timedelta(days=expiration)`.
        """
        now = datetime.utcnow()
        payload = {
            'exp': expiration,
            'iat': now,
            'iss': current_app.config['SERVER_NAME'],
            'purpose': 'User.follow_corpus',
            'role_name': role_name,
            'sub': corpus_hashid
        }
        return jwt.encode(
            payload,
            current_app.config['SECRET_KEY'],
            algorithm='HS256'
        )

    def follow_corpus_by_token(self, token):
        """Validate a follow-corpus token and follow the corpus; returns success."""
        try:
            payload = jwt.decode(
                token,
                current_app.config['SECRET_KEY'],
                algorithms=['HS256'],
                issuer=current_app.config['SERVER_NAME'],
                options={'require': ['exp', 'iat', 'iss', 'purpose', 'role_name', 'sub']}
            )
        except jwt.PyJWTError:
            return False
        if payload.get('purpose') != 'User.follow_corpus':
            return False
        corpus_hashid = payload.get('sub')
        corpus_id = hashids.decode(corpus_hashid)
        # NOTE(review): get_or_404 aborts with an HTTP 404 instead of
        # returning None, so the `is None` check below is unreachable —
        # confirm whether a 404 abort or a False return is intended here.
        corpus = Corpus.query.get_or_404(corpus_id)
        if corpus is None:
            return False
        role_name = payload.get('role_name')
        role = CorpusFollowerRole.query.filter_by(name=role_name).first()
        if role is None:
            return False
        self.follow_corpus(corpus, role)
        # db.session.add(self)
        return True

    def to_json_serializeable(self, backrefs=False, relationships=False, filter_by_privacy_settings=False):
        """Serialize this user into a JSON-compatible dict.

        backrefs: embed the role. relationships: embed follower
        associations, corpora, jobs and pipeline models.
        filter_by_privacy_settings: drop fields hidden by the privacy flags.
        """
        json_serializeable = {
            'id': self.hashid,
            'confirmed': self.confirmed,
            'avatar': url_for('users.user_avatar', user_id=self.id),
            'email': self.email,
            'last_seen': (
                None if self.last_seen is None
                else f'{self.last_seen.isoformat()}Z'
            ),
            'member_since': f'{self.member_since.isoformat()}Z',
            'username': self.username,
            'full_name': self.full_name,
            'about_me': self.about_me,
            'website': self.website,
            'location': self.location,
            'organization': self.organization,
            'job_status_mail_notification_level': \
                self.setting_job_status_mail_notification_level.name,
            'profile_privacy_settings': {
                'is_public': self.is_public,
                'show_email': self.has_profile_privacy_setting(ProfilePrivacySettings.SHOW_EMAIL),
                'show_last_seen': self.has_profile_privacy_setting(ProfilePrivacySettings.SHOW_LAST_SEEN),
                'show_member_since': self.has_profile_privacy_setting(ProfilePrivacySettings.SHOW_MEMBER_SINCE)
            }
        }
        if backrefs:
            json_serializeable['role'] = \
                self.role.to_json_serializeable(backrefs=True)
        if relationships:
            json_serializeable['corpus_follower_associations'] = {
                x.hashid: x.to_json_serializeable()
                for x in self.corpus_follower_associations
            }
            json_serializeable['corpora'] = {
                x.hashid: x.to_json_serializeable(relationships=True)
                for x in self.corpora
            }
            json_serializeable['jobs'] = {
                x.hashid: x.to_json_serializeable(relationships=True)
                for x in self.jobs
            }
            json_serializeable['tesseract_ocr_pipeline_models'] = {
                x.hashid: x.to_json_serializeable(relationships=True)
                for x in self.tesseract_ocr_pipeline_models
            }
            json_serializeable['spacy_nlp_pipeline_models'] = {
                x.hashid: x.to_json_serializeable(relationships=True)
                for x in self.spacy_nlp_pipeline_models
            }

        if filter_by_privacy_settings:
            if not self.has_profile_privacy_setting(ProfilePrivacySettings.SHOW_EMAIL):
                json_serializeable.pop('email')
            if not self.has_profile_privacy_setting(ProfilePrivacySettings.SHOW_LAST_SEEN):
                json_serializeable.pop('last_seen')
            if not self.has_profile_privacy_setting(ProfilePrivacySettings.SHOW_MEMBER_SINCE):
                json_serializeable.pop('member_since')
        return json_serializeable
|
@ -1,11 +1,12 @@
|
||||
from flask import Blueprint
|
||||
from flask_login import login_required
|
||||
from pathlib import Path
|
||||
import os
|
||||
import yaml
|
||||
|
||||
|
||||
services_file = Path(__file__).parent / 'services.yml'
|
||||
with services_file.open('r') as f:
|
||||
services_file = \
|
||||
os.path.join(os.path.dirname(os.path.abspath(__file__)), 'services.yml')
|
||||
with open(services_file, 'r') as f:
|
||||
SERVICES = yaml.safe_load(f)
|
||||
|
||||
bp = Blueprint('services', __name__)
|
||||
|
@ -6,7 +6,6 @@ from app import db, hashids
|
||||
from app.models import (
|
||||
Job,
|
||||
JobInput,
|
||||
JobResult,
|
||||
JobStatus,
|
||||
TesseractOCRPipelineModel,
|
||||
SpaCyNLPPipelineModel
|
||||
@ -62,7 +61,7 @@ def file_setup_pipeline():
|
||||
return {}, 201, {'Location': job.url}
|
||||
return render_template(
|
||||
'services/file_setup_pipeline.html.j2',
|
||||
title=service_manifest['name'],
|
||||
service_manifest=service_manifest,
|
||||
form=form
|
||||
)
|
||||
|
||||
@ -75,8 +74,6 @@ def tesseract_ocr_pipeline():
|
||||
version = request.args.get('version', service_manifest['latest_version'])
|
||||
if version not in service_manifest['versions']:
|
||||
abort(404)
|
||||
job_results = JobResult.query.all()
|
||||
choosable_job_ids = [job_result.job.hashid for job_result in job_results if job_result.job.service == "file-setup-pipeline" and job_result.filename.endswith('.pdf')]
|
||||
form = CreateTesseractOCRPipelineJobForm(prefix='create-job-form', version=version)
|
||||
if form.is_submitted():
|
||||
if not form.validate():
|
||||
@ -113,8 +110,7 @@ def tesseract_ocr_pipeline():
|
||||
user_tesseract_ocr_pipeline_models_count = len(current_user.tesseract_ocr_pipeline_models.all())
|
||||
return render_template(
|
||||
'services/tesseract_ocr_pipeline.html.j2',
|
||||
title=service_manifest['name'],
|
||||
choosable_job_ids=choosable_job_ids,
|
||||
service_manifest=service_manifest,
|
||||
form=form,
|
||||
tesseract_ocr_pipeline_models=tesseract_ocr_pipeline_models,
|
||||
user_tesseract_ocr_pipeline_models_count=user_tesseract_ocr_pipeline_models_count
|
||||
@ -173,7 +169,7 @@ def transkribus_htr_pipeline():
|
||||
return {}, 201, {'Location': job.url}
|
||||
return render_template(
|
||||
'services/transkribus_htr_pipeline.html.j2',
|
||||
title=service_manifest['name'],
|
||||
service_manifest=service_manifest,
|
||||
form=form,
|
||||
transkribus_htr_pipeline_models=transkribus_htr_pipeline_models
|
||||
)
|
||||
@ -219,7 +215,7 @@ def spacy_nlp_pipeline():
|
||||
return {}, 201, {'Location': job.url}
|
||||
return render_template(
|
||||
'services/spacy_nlp_pipeline.html.j2',
|
||||
title=service_manifest['name'],
|
||||
service_manifest=service_manifest,
|
||||
form=form,
|
||||
spacy_nlp_pipeline_models=spacy_nlp_pipeline_models,
|
||||
user_spacy_nlp_pipeline_models_count=user_spacy_nlp_pipeline_models_count
|
||||
|
@ -7,28 +7,39 @@ file-setup-pipeline:
|
||||
0.1.0:
|
||||
publishing_year: 2022
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/file-setup-pipeline/-/releases/v0.1.0'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/file-setup-pipeline/-/tree/v0.1.0'
|
||||
tesseract-ocr-pipeline:
|
||||
name: 'Tesseract OCR Pipeline'
|
||||
publisher: 'Bielefeld University - CRC 1288 - INF'
|
||||
latest_version: '0.1.2'
|
||||
latest_version: '0.1.3'
|
||||
versions:
|
||||
0.1.0:
|
||||
methods:
|
||||
- 'binarization'
|
||||
publishing_year: 2022
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/tesseract-ocr-pipeline/-/releases/v0.1.0'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/tesseract-ocr-pipeline/-/tree/v0.1.0'
|
||||
0.1.1:
|
||||
methods:
|
||||
- 'binarization'
|
||||
- 'ocropus_nlbin_threshold'
|
||||
publishing_year: 2022
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/tesseract-ocr-pipeline/-/releases/v0.1.1'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/tesseract-ocr-pipeline/-/tree/v0.1.1'
|
||||
0.1.2:
|
||||
methods:
|
||||
- 'binarization'
|
||||
- 'ocropus_nlbin_threshold'
|
||||
publishing_year: 2023
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/tesseract-ocr-pipeline/-/releases/v0.1.2'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/tesseract-ocr-pipeline/-/tree/v0.1.2'
|
||||
0.1.3:
|
||||
methods:
|
||||
- 'binarization'
|
||||
- 'ocropus_nlbin_threshold'
|
||||
publishing_year: 2023
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/tesseract-ocr-pipeline/-/releases/v0.1.3'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/tesseract-ocr-pipeline/-/tree/v0.1.3'
|
||||
transkribus-htr-pipeline:
|
||||
name: 'Transkribus HTR Pipeline'
|
||||
publisher: 'Bielefeld University - CRC 1288 - INF'
|
||||
@ -39,28 +50,51 @@ transkribus-htr-pipeline:
|
||||
- 'binarization'
|
||||
publishing_year: 2022
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/transkribus-htr-pipeline/-/releases/v0.1.0'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/transkribus-htr-pipeline/-/tree/v0.1.0'
|
||||
0.1.1:
|
||||
methods:
|
||||
- 'binarization'
|
||||
publishing_year: 2022
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/transkribus-htr-pipeline/-/releases/v0.1.1'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/transkribus-htr-pipeline/-/tree/v0.1.1'
|
||||
spacy-nlp-pipeline:
|
||||
name: 'SpaCy NLP Pipeline'
|
||||
publisher: 'Bielefeld University - CRC 1288 - INF'
|
||||
latest_version: '0.1.1'
|
||||
latest_version: '0.1.5'
|
||||
versions:
|
||||
0.1.0:
|
||||
methods:
|
||||
- 'encoding_detection'
|
||||
publishing_year: 2022
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/releases/v0.1.0'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/tree/v0.1.0'
|
||||
0.1.1:
|
||||
methods:
|
||||
- 'encoding_detection'
|
||||
publishing_year: 2022
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/releases/v0.1.1'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/tree/v0.1.1'
|
||||
0.1.2:
|
||||
methods:
|
||||
- 'encoding_detection'
|
||||
publishing_year: 2024
|
||||
publishing_year: 2022
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/releases/v0.1.2'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/tree/v0.1.2'
|
||||
0.1.3:
|
||||
methods:
|
||||
- 'encoding_detection'
|
||||
publishing_year: 2023
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/releases/v0.1.3'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/tree/v0.1.3'
|
||||
0.1.4:
|
||||
methods:
|
||||
- 'encoding_detection'
|
||||
publishing_year: 2023
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/releases/v0.1.4'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/tree/v0.1.4'
|
||||
0.1.5:
|
||||
methods:
|
||||
- 'encoding_detection'
|
||||
publishing_year: 2023
|
||||
url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/releases/v0.1.5'
|
||||
code_url: 'https://gitlab.ub.uni-bielefeld.de/sfb1288inf/spacy-nlp-pipeline/-/tree/v0.1.5'
|
||||
|
@ -1,108 +1,132 @@
|
||||
#corpus-analysis-concordance-query-builder-input-field {
|
||||
border-bottom: #9E9E9E 1px solid;
|
||||
min-height: 38px;
|
||||
margin-top: 23px;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-query-builder-input-field-placeholder {
|
||||
color: #9E9E9E;
|
||||
}
|
||||
|
||||
.modal-content {
|
||||
.modal-conent {
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-positional-attr-modal, #corpus-analysis-concordance-corpus-analysis-concordance-structural-attr-modal {
|
||||
#concordance-query-builder {
|
||||
width: 70%;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-general-options-query-builder-tutorial-info-icon {
|
||||
#concordance-query-builder nav {
|
||||
background-color: #6B3F89;
|
||||
margin-top: -25px;
|
||||
margin-left: -25px;
|
||||
width: 105%;
|
||||
}
|
||||
|
||||
#query-builder-nav{
|
||||
padding-left: 15px;
|
||||
}
|
||||
|
||||
#close-query-builder {
|
||||
margin-right: 50px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
#general-options-query-builder-tutorial-info-icon {
|
||||
color: black;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-insert-query-button {
|
||||
#your-query {
|
||||
border-bottom-style: solid;
|
||||
border-bottom-width: 1px;
|
||||
}
|
||||
|
||||
#insert-query-button {
|
||||
background-color: #00426f;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.attr-modal-header {
|
||||
#structural-attr h6 {
|
||||
margin-left: 15px;
|
||||
}
|
||||
|
||||
#add-structural-attribute-tutorial-info-icon {
|
||||
color: black;
|
||||
}
|
||||
|
||||
#sentence {
|
||||
background-color:#FD9720;
|
||||
}
|
||||
|
||||
#entity {
|
||||
background-color: #A6E22D;
|
||||
}
|
||||
|
||||
#text-annotation {
|
||||
background-color: #2FBBAB;
|
||||
}
|
||||
|
||||
#no-value-metadata-message {
|
||||
padding-top: 25px;
|
||||
margin-left: -20px;
|
||||
}
|
||||
|
||||
#token-kind-selector {
|
||||
background-color: #f2eff7;
|
||||
padding: 15px;
|
||||
padding-left: 25px;
|
||||
border-top: 10px solid #6B3F89;
|
||||
margin-left: -24px;
|
||||
margin-top: -24px;
|
||||
margin-right: -24px;
|
||||
border-top-style: solid;
|
||||
border-color: #6B3F89;
|
||||
}
|
||||
|
||||
.attr-modal-header h6 {
|
||||
#token-kind-selector.s5 {
|
||||
margin-top: 15px;
|
||||
}
|
||||
|
||||
#token-kind-selector h6 {
|
||||
margin-left: 15px;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-add-structural-attribute-tutorial-info-icon {
|
||||
#token-tutorial-info-icon {
|
||||
color: black;
|
||||
}
|
||||
|
||||
[data-structural-attr-modal-action-button="sentence"]{
|
||||
background-color:#FD9720 !important;
|
||||
}
|
||||
|
||||
[data-structural-attr-modal-action-button="entity"]{
|
||||
background-color: #A6E22D !important;
|
||||
}
|
||||
|
||||
[data-structural-attr-modal-action-button="meta-data"]{
|
||||
background-color: #2FBBAB !important;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-no-value-metadata-message {
|
||||
#no-value-message {
|
||||
padding-top: 25px;
|
||||
margin-left: -20px;
|
||||
}
|
||||
|
||||
.attr-modal-header.input-field {
|
||||
margin-left: 41px;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-token-attr {
|
||||
margin-left: 41px;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-token-tutorial-info-icon {
|
||||
color: black;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-no-value-message {
|
||||
padding-top: 25px;
|
||||
margin-left: -20px;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-token-edit-options h6 {
|
||||
#token-edit-options h6 {
|
||||
margin-left: 15px;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-edit-options-tutorial-info-icon {
|
||||
#edit-options-tutorial-info-icon {
|
||||
color: black;
|
||||
}
|
||||
|
||||
[data-toggle-area="input-field-options"] a {
|
||||
margin-right: 10px;
|
||||
#incidence-modifiers-button a{
|
||||
background-color: #2FBBAB;
|
||||
}
|
||||
|
||||
[data-target="corpus-analysis-concordance-character-incidence-modifiers-dropdown"], [data-target="corpus-analysis-concordance-token-incidence-modifiers-dropdown"] {
|
||||
background-color: #2FBBAB !important;
|
||||
#incidence-modifiers a{
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
#corpus-analysis-concordance-exactly-n-token-modal, #corpus-analysis-concordance-between-nm-token-modal {
|
||||
width: 30%;
|
||||
#ignore-case {
|
||||
margin-left: 5px;
|
||||
}
|
||||
|
||||
[data-modal-id="corpus-analysis-concordance-exactly-n-token-modal"], [data-modal-id="corpus-analysis-concordance-between-nm-token-modal"] {
|
||||
margin-top: 15px !important;
|
||||
#or, #and {
|
||||
background-color: #fc0;
|
||||
}
|
||||
|
||||
[data-options-action="and"], [data-options-action="or"] {
|
||||
background-color: #fc0 !important;
|
||||
#betweenNM {
|
||||
width: 60%;
|
||||
}
|
||||
|
||||
#query-builder-tutorial-modal {
|
||||
width: 60%;
|
||||
}
|
||||
|
||||
#query-builder-tutorial-modal ul {
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
#query-builder-tutorial {
|
||||
padding:15px;
|
||||
}
|
||||
|
||||
#scroll-up-button-query-builder-tutorial {
|
||||
background-color: #28B3D1;
|
||||
}
|
||||
|
||||
[data-type="start-sentence"], [data-type="end-sentence"] {
|
||||
@ -110,18 +134,13 @@
|
||||
}
|
||||
|
||||
[data-type="start-empty-entity"], [data-type="start-entity"], [data-type="end-entity"] {
|
||||
background-color: #a6e22d;
|
||||
background-color: #A6E22D;
|
||||
}
|
||||
|
||||
[data-type="text-annotation"]{
|
||||
[data-type="start-text-annotation"]{
|
||||
background-color: #2FBBAB;
|
||||
}
|
||||
|
||||
[data-type="token"] {
|
||||
background-color: #28B3D1;
|
||||
}
|
||||
|
||||
[data-type="token-incidence-modifier"] {
|
||||
background-color: #4db6ac;
|
||||
color: white;
|
||||
}
|
||||
|
Before Width: | Height: | Size: 123 KiB After Width: | Height: | Size: 222 KiB |
Before Width: | Height: | Size: 402 KiB After Width: | Height: | Size: 378 KiB |
Before Width: | Height: | Size: 720 KiB |
Before Width: | Height: | Size: 589 KiB After Width: | Height: | Size: 854 KiB |
Before Width: | Height: | Size: 436 KiB |
Before Width: | Height: | Size: 189 KiB |
Before Width: | Height: | Size: 381 KiB After Width: | Height: | Size: 511 KiB |
Before Width: | Height: | Size: 759 KiB After Width: | Height: | Size: 1009 KiB |
Before Width: | Height: | Size: 750 KiB After Width: | Height: | Size: 903 KiB |
Before Width: | Height: | Size: 524 KiB After Width: | Height: | Size: 413 KiB |
Before Width: | Height: | Size: 23 KiB |
Before Width: | Height: | Size: 30 KiB After Width: | Height: | Size: 34 KiB |
104
app/static/js/App.js
Normal file
@ -0,0 +1,104 @@
|
||||
class App {
|
||||
constructor() {
|
||||
this.data = {
|
||||
promises: {getUser: {}, subscribeUser: {}},
|
||||
users: {},
|
||||
};
|
||||
this.socket = io({transports: ['websocket'], upgrade: false});
|
||||
this.socket.on('PATCH', (patch) => {this.onPatch(patch);});
|
||||
}
|
||||
|
||||
getUser(userId) {
|
||||
if (userId in this.data.promises.getUser) {
|
||||
return this.data.promises.getUser[userId];
|
||||
}
|
||||
|
||||
this.data.promises.getUser[userId] = new Promise((resolve, reject) => {
|
||||
this.socket.emit('GET /users/<user_id>', userId, (response) => {
|
||||
if (response.status === 200) {
|
||||
this.data.users[userId] = response.body;
|
||||
resolve(this.data.users[userId]);
|
||||
} else {
|
||||
reject(`[${response.status}] ${response.statusText}`);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return this.data.promises.getUser[userId];
|
||||
}
|
||||
|
||||
subscribeUser(userId) {
|
||||
if (userId in this.data.promises.subscribeUser) {
|
||||
return this.data.promises.subscribeUser[userId];
|
||||
}
|
||||
|
||||
this.data.promises.subscribeUser[userId] = new Promise((resolve, reject) => {
|
||||
this.socket.emit('SUBSCRIBE /users/<user_id>', userId, (response) => {
|
||||
if (response.status !== 200) {
|
||||
reject(response);
|
||||
return;
|
||||
}
|
||||
resolve(response);
|
||||
});
|
||||
});
|
||||
|
||||
return this.data.promises.subscribeUser[userId];
|
||||
}
|
||||
|
||||
flash(message, category) {
|
||||
let iconPrefix = '';
|
||||
switch (category) {
|
||||
case 'corpus': {
|
||||
iconPrefix = '<i class="left material-icons">book</i>';
|
||||
break;
|
||||
}
|
||||
case 'error': {
|
||||
iconPrefix = '<i class="error-color-text left material-icons">error</i>';
|
||||
break;
|
||||
}
|
||||
case 'job': {
|
||||
iconPrefix = '<i class="left nopaque-icons">J</i>';
|
||||
break;
|
||||
}
|
||||
case 'settings': {
|
||||
iconPrefix = '<i class="left material-icons">settings</i>';
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
iconPrefix = '<i class="left material-icons">notifications</i>';
|
||||
break;
|
||||
}
|
||||
}
|
||||
let toast = M.toast(
|
||||
{
|
||||
html: `
|
||||
<span>${iconPrefix}${message}</span>
|
||||
<button class="action-button btn-flat toast-action white-text" data-action="close">
|
||||
<i class="material-icons">close</i>
|
||||
</button>
|
||||
`.trim()
|
||||
}
|
||||
);
|
||||
let toastCloseActionElement = toast.el.querySelector('.action-button[data-action="close"]');
|
||||
toastCloseActionElement.addEventListener('click', () => {toast.dismiss();});
|
||||
}
|
||||
|
||||
onPatch(patch) {
|
||||
// Filter Patch to only include operations on users that are initialized
|
||||
let regExp = new RegExp(`^/users/(${Object.keys(this.data.users).join('|')})`);
|
||||
let filteredPatch = patch.filter(operation => regExp.test(operation.path));
|
||||
|
||||
// Handle job status updates
|
||||
let subRegExp = new RegExp(`^/users/([A-Za-z0-9]*)/jobs/([A-Za-z0-9]*)/status$`);
|
||||
let subFilteredPatch = filteredPatch
|
||||
.filter((operation) => {return operation.op === 'replace';})
|
||||
.filter((operation) => {return subRegExp.test(operation.path);});
|
||||
for (let operation of subFilteredPatch) {
|
||||
let [match, userId, jobId] = operation.path.match(subRegExp);
|
||||
this.flash(`[<a href="/jobs/${jobId}">${this.data.users[userId].jobs[jobId].title}</a>] New status: <span class="job-status-text" data-status="${operation.value}"></span>`, 'job');
|
||||
}
|
||||
|
||||
// Apply Patch
|
||||
jsonpatch.applyPatch(this.data, filteredPatch);
|
||||
}
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
nopaque.corpus_analysis.App = class App {
|
||||
class CorpusAnalysisApp {
|
||||
constructor(corpusId) {
|
||||
this.corpusId = corpusId;
|
||||
|
||||
@ -6,10 +6,10 @@ nopaque.corpus_analysis.App = class App {
|
||||
|
||||
// HTML elements
|
||||
this.elements = {
|
||||
container: document.querySelector('#corpus-analysis-container'),
|
||||
extensionCards: document.querySelector('#corpus-analysis-extension-cards'),
|
||||
extensionTabs: document.querySelector('#corpus-analysis-extension-tabs'),
|
||||
initModal: document.querySelector('#corpus-analysis-init-modal')
|
||||
container: document.querySelector('#corpus-analysis-app-container'),
|
||||
extensionCards: document.querySelector('#corpus-analysis-app-extension-cards'),
|
||||
extensionTabs: document.querySelector('#corpus-analysis-app-extension-tabs'),
|
||||
initModal: document.querySelector('#corpus-analysis-app-init-modal')
|
||||
};
|
||||
// Materialize elements
|
||||
this.elements.m = {
|
||||
@ -25,12 +25,12 @@ nopaque.corpus_analysis.App = class App {
|
||||
async init() {
|
||||
this.disableActionElements();
|
||||
this.elements.m.initModal.open();
|
||||
|
||||
|
||||
try {
|
||||
// Setup CQi over SocketIO connection and gather data from the CQPServer
|
||||
const statusTextElement = this.elements.initModal.querySelector('.status-text');
|
||||
statusTextElement.innerText = 'Creating CQi over SocketIO client...';
|
||||
const cqiClient = new nopaque.corpus_analysis.cqi.Client('/cqi_over_sio');
|
||||
const cqiClient = new cqi.CQiClient('/cqi_over_sio');
|
||||
statusTextElement.innerText += ' Done';
|
||||
statusTextElement.innerHTML = 'Waiting for the CQP server...';
|
||||
const response = await cqiClient.api.socket.emitWithAck('init', this.corpusId);
|
@ -1,4 +1,4 @@
|
||||
nopaque.corpus_analysis.ConcordanceExtension = class ConcordanceExtension {
|
||||
class CorpusAnalysisConcordance {
|
||||
name = 'Concordance';
|
||||
|
||||
constructor(app) {
|
||||
@ -7,38 +7,33 @@ nopaque.corpus_analysis.ConcordanceExtension = class ConcordanceExtension {
|
||||
this.data = {};
|
||||
|
||||
this.elements = {
|
||||
container: document.querySelector(`#corpus-analysis-concordance-container`),
|
||||
error: document.querySelector(`#corpus-analysis-concordance-error`),
|
||||
userInterfaceForm: document.querySelector(`#corpus-analysis-concordance-user-interface-form`),
|
||||
expertModeForm: document.querySelector(`#corpus-analysis-concordance-expert-mode-form`),
|
||||
queryBuilderForm: document.querySelector(`#corpus-analysis-concordance-query-builder-form`),
|
||||
progress: document.querySelector(`#corpus-analysis-concordance-progress`),
|
||||
subcorpusInfo: document.querySelector(`#corpus-analysis-concordance-subcorpus-info`),
|
||||
subcorpusActions: document.querySelector(`#corpus-analysis-concordance-subcorpus-actions`),
|
||||
subcorpusItems: document.querySelector(`#corpus-analysis-concordance-subcorpus-items`),
|
||||
subcorpusList: document.querySelector(`#corpus-analysis-concordance-subcorpus-list`),
|
||||
subcorpusPagination: document.querySelector(`#corpus-analysis-concordance-subcorpus-pagination`)
|
||||
// TODO: Prefix elements with "corpus-analysis-app-"
|
||||
container: document.querySelector('#concordance-extension-container'),
|
||||
error: document.querySelector('#concordance-extension-error'),
|
||||
form: document.querySelector('#concordance-extension-form'),
|
||||
progress: document.querySelector('#concordance-extension-progress'),
|
||||
subcorpusInfo: document.querySelector('#concordance-extension-subcorpus-info'),
|
||||
subcorpusActions: document.querySelector('#concordance-extension-subcorpus-actions'),
|
||||
subcorpusItems: document.querySelector('#concordance-extension-subcorpus-items'),
|
||||
subcorpusList: document.querySelector('#concordance-extension-subcorpus-list'),
|
||||
subcorpusPagination: document.querySelector('#concordance-extension-subcorpus-pagination')
|
||||
};
|
||||
|
||||
this.settings = {
|
||||
context: parseInt(this.elements.userInterfaceForm['context'].value),
|
||||
perPage: parseInt(this.elements.userInterfaceForm['per-page'].value),
|
||||
context: parseInt(this.elements.form['context'].value),
|
||||
perPage: parseInt(this.elements.form['per-page'].value),
|
||||
selectedSubcorpus: undefined,
|
||||
textStyle: parseInt(this.elements.userInterfaceForm['text-style'].value),
|
||||
tokenRepresentation: this.elements.userInterfaceForm['token-representation'].value
|
||||
textStyle: parseInt(this.elements.form['text-style'].value),
|
||||
tokenRepresentation: this.elements.form['token-representation'].value
|
||||
};
|
||||
|
||||
this.app.registerExtension(this);
|
||||
}
|
||||
|
||||
async submitForm(queryModeId) {
|
||||
async submitForm() {
|
||||
this.app.disableActionElements();
|
||||
let queryBuilderQuery = nopaque.Utils.unescape(document.querySelector('#corpus-analysis-concordance-query-preview').innerHTML.trim());
|
||||
let expertModeQuery = this.elements.expertModeForm.query.value.trim();
|
||||
let query = queryModeId === 'corpus-analysis-concordance-expert-mode-form' ? expertModeQuery : queryBuilderQuery;
|
||||
let form = queryModeId === 'corpus-analysis-concordance-expert-mode-form' ? this.elements.expertModeForm : this.elements.queryBuilderForm;
|
||||
|
||||
let subcorpusName = form['subcorpus-name'].value;
|
||||
let query = this.elements.form.query.value.trim();
|
||||
let subcorpusName = this.elements.form['subcorpus-name'].value;
|
||||
this.elements.error.innerText = '';
|
||||
this.elements.error.classList.add('hide');
|
||||
this.elements.progress.classList.remove('hide');
|
||||
@ -77,29 +72,25 @@ nopaque.corpus_analysis.ConcordanceExtension = class ConcordanceExtension {
|
||||
this.data.corpus = this.app.data.corpus;
|
||||
this.data.subcorpora = {};
|
||||
// Add event listeners
|
||||
this.elements.expertModeForm.addEventListener('submit', (event) => {
|
||||
this.elements.form.addEventListener('submit', (event) => {
|
||||
event.preventDefault();
|
||||
this.submitForm(this.elements.expertModeForm.id);
|
||||
this.submitForm();
|
||||
});
|
||||
this.elements.queryBuilderForm.addEventListener('submit', (event) => {
|
||||
event.preventDefault();
|
||||
this.submitForm(this.elements.queryBuilderForm.id);
|
||||
});
|
||||
this.elements.userInterfaceForm.addEventListener('change', (event) => {
|
||||
if (event.target === this.elements.userInterfaceForm['context']) {
|
||||
this.settings.context = parseInt(this.elements.userInterfaceForm['context'].value);
|
||||
this.elements.form.addEventListener('change', (event) => {
|
||||
if (event.target === this.elements.form['context']) {
|
||||
this.settings.context = parseInt(this.elements.form['context'].value);
|
||||
this.submitForm();
|
||||
}
|
||||
if (event.target === this.elements.userInterfaceForm['per-page']) {
|
||||
this.settings.perPage = parseInt(this.elements.userInterfaceForm['per-page'].value);
|
||||
if (event.target === this.elements.form['per-page']) {
|
||||
this.settings.perPage = parseInt(this.elements.form['per-page'].value);
|
||||
this.submitForm();
|
||||
}
|
||||
if (event.target === this.elements.userInterfaceForm['text-style']) {
|
||||
this.settings.textStyle = parseInt(this.elements.userInterfaceForm['text-style'].value);
|
||||
if (event.target === this.elements.form['text-style']) {
|
||||
this.settings.textStyle = parseInt(this.elements.form['text-style'].value);
|
||||
this.setTextStyle();
|
||||
}
|
||||
if (event.target === this.elements.userInterfaceForm['token-representation']) {
|
||||
this.settings.tokenRepresentation = this.elements.userInterfaceForm['token-representation'].value;
|
||||
if (event.target === this.elements.form['token-representation']) {
|
||||
this.settings.tokenRepresentation = this.elements.form['token-representation'].value;
|
||||
this.setTokenRepresentation();
|
||||
}
|
||||
});
|
||||
@ -171,11 +162,11 @@ nopaque.corpus_analysis.ConcordanceExtension = class ConcordanceExtension {
|
||||
this.elements.subcorpusActions.querySelector('.subcorpus-export-trigger').addEventListener('click', (event) => {
|
||||
event.preventDefault();
|
||||
let subcorpus = this.data.subcorpora[this.settings.selectedSubcorpus];
|
||||
let modalElementId = nopaque.Utils.generateElementId('export-subcorpus-modal-');
|
||||
let exportFormatSelectElementId = nopaque.Utils.generateElementId('export-format-select-');
|
||||
let exportSelectedMatchesOnlyCheckboxElementId = nopaque.Utils.generateElementId('export-selected-matches-only-checkbox-');
|
||||
let exportFileNameInputElementId = nopaque.Utils.generateElementId('export-file-name-input-');
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElementId = Utils.generateElementId('export-subcorpus-modal-');
|
||||
let exportFormatSelectElementId = Utils.generateElementId('export-format-select-');
|
||||
let exportSelectedMatchesOnlyCheckboxElementId = Utils.generateElementId('export-selected-matches-only-checkbox-');
|
||||
let exportFileNameInputElementId = Utils.generateElementId('export-file-name-input-');
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal" id="${modalElementId}">
|
||||
<div class="modal-content">
|
||||
@ -387,9 +378,7 @@ nopaque.corpus_analysis.ConcordanceExtension = class ConcordanceExtension {
|
||||
document.getSelection().removeAllRanges();
|
||||
document.getSelection().addRange(range);
|
||||
});
|
||||
this.app.elements.m.extensionTabs.select(
|
||||
this.app.extensions.Reader.elements.container.id
|
||||
);
|
||||
this.app.elements.m.extensionTabs.select('reader-extension-container');
|
||||
});
|
||||
}
|
||||
for (let selectTriggerElement of this.elements.subcorpusItems.querySelectorAll('.select-trigger')) {
|
@ -1,4 +1,4 @@
|
||||
nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
class CorpusAnalysisReader {
|
||||
name = 'Reader';
|
||||
|
||||
constructor(app) {
|
||||
@ -7,18 +7,19 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
this.data = {};
|
||||
|
||||
this.elements = {
|
||||
container: document.querySelector(`#corpus-analysis-reader-container`),
|
||||
corpus: document.querySelector(`#corpus-analysis-reader-corpus`),
|
||||
corpusPagination: document.querySelector(`#corpus-analysis-reader-corpus-pagination`),
|
||||
error: document.querySelector(`#corpus-analysis-reader-error`),
|
||||
progress: document.querySelector(`#corpus-analysis-reader-progress`),
|
||||
userInterfaceForm: document.querySelector(`#corpus-analysis-reader-user-interface-form`)
|
||||
// TODO: Prefix elements with "corpus-analysis-app-"
|
||||
container: document.querySelector('#reader-extension-container'),
|
||||
error: document.querySelector('#reader-extension-error'),
|
||||
form: document.querySelector('#reader-extension-form'),
|
||||
progress: document.querySelector('#reader-extension-progress'),
|
||||
corpus: document.querySelector('#reader-extension-corpus'),
|
||||
corpusPagination: document.querySelector('#reader-extension-corpus-pagination')
|
||||
};
|
||||
|
||||
this.settings = {
|
||||
perPage: parseInt(this.elements.userInterfaceForm['per-page'].value),
|
||||
textStyle: parseInt(this.elements.userInterfaceForm['text-style'].value),
|
||||
tokenRepresentation: this.elements.userInterfaceForm['token-representation'].value,
|
||||
perPage: parseInt(this.elements.form['per-page'].value),
|
||||
textStyle: parseInt(this.elements.form['text-style'].value),
|
||||
tokenRepresentation: this.elements.form['token-representation'].value,
|
||||
pagination: {
|
||||
innerWindow: 5,
|
||||
outerWindow: 1
|
||||
@ -56,21 +57,21 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
// Init data
|
||||
this.data.corpus = this.app.data.corpus;
|
||||
// Add event listeners
|
||||
this.elements.userInterfaceForm.addEventListener('submit', (event) => {
|
||||
this.elements.form.addEventListener('submit', (event) => {
|
||||
event.preventDefault();
|
||||
this.submitForm();
|
||||
});
|
||||
this.elements.userInterfaceForm.addEventListener('change', (event) => {
|
||||
if (event.target === this.elements.userInterfaceForm['per-page']) {
|
||||
this.settings.perPage = parseInt(this.elements.userInterfaceForm['per-page'].value);
|
||||
this.elements.form.addEventListener('change', (event) => {
|
||||
if (event.target === this.elements.form['per-page']) {
|
||||
this.settings.perPage = parseInt(this.elements.form['per-page'].value);
|
||||
this.submitForm();
|
||||
}
|
||||
if (event.target === this.elements.userInterfaceForm['text-style']) {
|
||||
this.settings.textStyle = parseInt(this.elements.userInterfaceForm['text-style'].value);
|
||||
if (event.target === this.elements.form['text-style']) {
|
||||
this.settings.textStyle = parseInt(this.elements.form['text-style'].value);
|
||||
this.setTextStyle();
|
||||
}
|
||||
if (event.target === this.elements.userInterfaceForm['token-representation']) {
|
||||
this.settings.tokenRepresentation = this.elements.userInterfaceForm['token-representation'].value;
|
||||
if (event.target === this.elements.form['token-representation']) {
|
||||
this.settings.tokenRepresentation = this.elements.form['token-representation'].value;
|
||||
this.setTokenRepresentation();
|
||||
}
|
||||
});
|
||||
@ -112,7 +113,7 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
if (this.data.corpus.p.pages === 0) {return;}
|
||||
let pageElement;
|
||||
// First page button. Disables first page button if on first page
|
||||
pageElement = nopaque.Utils.HTMLToElement(
|
||||
pageElement = Utils.HTMLToElement(
|
||||
`
|
||||
<li class="${this.data.corpus.p.page === 1 ? 'disabled' : 'waves-effect'}">
|
||||
<a class="corpus-analysis-action pagination-trigger" ${this.data.corpus.p.page === 1 ? '' : 'data-target="1"'}>
|
||||
@ -123,7 +124,7 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
);
|
||||
this.elements.corpusPagination.appendChild(pageElement);
|
||||
// Previous page button. Disables previous page button if on first page
|
||||
pageElement = nopaque.Utils.HTMLToElement(
|
||||
pageElement = Utils.HTMLToElement(
|
||||
`
|
||||
<li class="${this.data.corpus.p.has_prev ? 'waves-effect' : 'disabled'}">
|
||||
<a class="corpus-analysis-action pagination-trigger" ${this.data.corpus.p.has_prev ? 'data-target="' + this.data.corpus.p.prev_num + '"' : ''}>
|
||||
@ -135,7 +136,7 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
this.elements.corpusPagination.appendChild(pageElement);
|
||||
// First page as number. Hides first page button if on first page
|
||||
if (this.data.corpus.p.page > 6) {
|
||||
pageElement = nopaque.Utils.HTMLToElement(
|
||||
pageElement = Utils.HTMLToElement(
|
||||
`
|
||||
<li class="waves-effect">
|
||||
<a class="corpus-analysis-action pagination-trigger" data-target="1">1</a>
|
||||
@ -143,14 +144,14 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
`
|
||||
);
|
||||
this.elements.corpusPagination.appendChild(pageElement);
|
||||
pageElement = nopaque.Utils.HTMLToElement("<li style='margin-top: 5px;'>…</li>");
|
||||
pageElement = Utils.HTMLToElement("<li style='margin-top: 5px;'>…</li>");
|
||||
this.elements.corpusPagination.appendChild(pageElement);
|
||||
}
|
||||
|
||||
// render page buttons (5 before and 5 after current page)
|
||||
for (let i = this.data.corpus.p.page - this.settings.pagination.innerWindow; i <= this.data.corpus.p.page; i++) {
|
||||
if (i <= 0) {continue;}
|
||||
pageElement = nopaque.Utils.HTMLToElement(
|
||||
pageElement = Utils.HTMLToElement(
|
||||
`
|
||||
<li class="${i === this.data.corpus.p.page ? 'active' : 'waves-effect'}">
|
||||
<a class="corpus-analysis-action pagination-trigger" ${i === this.data.corpus.p.page ? '' : 'data-target="' + i + '"'}>${i}</a>
|
||||
@ -161,7 +162,7 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
};
|
||||
for (let i = this.data.corpus.p.page +1; i <= this.data.corpus.p.page + this.settings.pagination.innerWindow; i++) {
|
||||
if (i > this.data.corpus.p.pages) {break;}
|
||||
pageElement = nopaque.Utils.HTMLToElement(
|
||||
pageElement = Utils.HTMLToElement(
|
||||
`
|
||||
<li class="${i === this.data.corpus.p.page ? 'active' : 'waves-effect'}">
|
||||
<a class="corpus-analysis-action pagination-trigger" ${i === this.data.corpus.p.page ? '' : 'data-target="' + i + '"'}>${i}</a>
|
||||
@ -172,9 +173,9 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
};
|
||||
// Last page as number. Hides last page button if on last page
|
||||
if (this.data.corpus.p.page < this.data.corpus.p.pages - 6) {
|
||||
pageElement = nopaque.Utils.HTMLToElement("<li style='margin-top: 5px;'>…</li>");
|
||||
pageElement = Utils.HTMLToElement("<li style='margin-top: 5px;'>…</li>");
|
||||
this.elements.corpusPagination.appendChild(pageElement);
|
||||
pageElement = nopaque.Utils.HTMLToElement(
|
||||
pageElement = Utils.HTMLToElement(
|
||||
`
|
||||
<li class="waves-effect">
|
||||
<a class="corpus-analysis-action pagination-trigger" data-target="${this.data.corpus.p.pages}">${this.data.corpus.p.pages}</a>
|
||||
@ -184,7 +185,7 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
this.elements.corpusPagination.appendChild(pageElement);
|
||||
}
|
||||
// Next page button. Disables next page button if on last page
|
||||
pageElement = nopaque.Utils.HTMLToElement(
|
||||
pageElement = Utils.HTMLToElement(
|
||||
`
|
||||
<li class="${this.data.corpus.p.has_next ? 'waves-effect' : 'disabled'}">
|
||||
<a class="corpus-analysis-action pagination-trigger" ${this.data.corpus.p.has_next ? 'data-target="' + this.data.corpus.p.next_num + '"' : ''}>
|
||||
@ -195,7 +196,7 @@ nopaque.corpus_analysis.ReaderExtension = class ReaderExtension {
|
||||
);
|
||||
this.elements.corpusPagination.appendChild(pageElement);
|
||||
// Last page button. Disables last page button if on last page
|
||||
pageElement = nopaque.Utils.HTMLToElement(
|
||||
pageElement = Utils.HTMLToElement(
|
||||
`
|
||||
<li class="${this.data.corpus.p.page === this.data.corpus.p.pages ? 'disabled' : 'waves-effect'}">
|
||||
<a class="corpus-analysis-action pagination-trigger" ${this.data.corpus.p.page === this.data.corpus.p.pages ? '' : 'data-target="' + this.data.corpus.p.pages + '"'}>
|
@ -1,4 +1,4 @@
|
||||
nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualizationExtension {
|
||||
class CorpusAnalysisStaticVisualization {
|
||||
name = 'Static Visualization (beta)';
|
||||
|
||||
constructor(app) {
|
||||
@ -75,7 +75,7 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
|
||||
getStopwords() {
|
||||
this.data.promises.getStopwords = new Promise((resolve, reject) => {
|
||||
nopaque.requests.corpora.entity.getStopwords()
|
||||
Requests.corpora.entity.getStopwords()
|
||||
.then((response) => {
|
||||
response.json()
|
||||
.then((json) => {
|
||||
@ -93,8 +93,8 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
|
||||
renderGeneralCorpusInfo() {
|
||||
let corpusData = this.data.corpus.o.staticData;
|
||||
document.querySelector('.corpus-num-tokens').innerHTML = corpusData.corpus.bounds[1] - corpusData.corpus.bounds[0];
|
||||
document.querySelector('.corpus-num-s').innerHTML = corpusData.s_attrs.s.lexicon.length;
|
||||
document.querySelector('.corpus-num-tokens').innerHTML = corpusData.corpus.counts.token;
|
||||
document.querySelector('.corpus-num-s').innerHTML = corpusData.corpus.counts.s;
|
||||
document.querySelector('.corpus-num-unique-words').innerHTML = Object.entries(corpusData.corpus.freqs.word).length;
|
||||
document.querySelector('.corpus-num-unique-lemmas').innerHTML = Object.entries(corpusData.corpus.freqs.lemma).length;
|
||||
document.querySelector('.corpus-num-unique-pos').innerHTML = Object.entries(corpusData.corpus.freqs.pos).length;
|
||||
@ -104,18 +104,15 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
renderTextInfoList() {
|
||||
let corpusData = this.data.corpus.o.staticData;
|
||||
let corpusTextInfoListElement = document.querySelector('.corpus-text-info-list');
|
||||
let corpusTextInfoList = new nopaque.resource_lists.CorpusTextInfoList(corpusTextInfoListElement);
|
||||
let corpusTextInfoList = new CorpusTextInfoList(corpusTextInfoListElement);
|
||||
let texts = corpusData.s_attrs.text.lexicon;
|
||||
let textData = [];
|
||||
for (let i = 0; i < Object.entries(texts).length; i++) {
|
||||
let resource = {
|
||||
title: corpusData.values.s_attrs.text[i].title,
|
||||
publishing_year: corpusData.values.s_attrs.text[i].publishing_year,
|
||||
// num_sentences: corpusData.s_attrs.text.lexicon[i].counts.s,
|
||||
num_tokens: corpusData.s_attrs.text.lexicon[i].bounds[1] - corpusData.s_attrs.text.lexicon[i].bounds[0],
|
||||
num_sentences: corpusData.s_attrs.s.lexicon.filter((s) => {
|
||||
return s.bounds[0] >= corpusData.s_attrs.text.lexicon[i].bounds[0] && s.bounds[1] <= corpusData.s_attrs.text.lexicon[i].bounds[1];
|
||||
}).length,
|
||||
num_tokens: corpusData.s_attrs.text.lexicon[i].counts.token,
|
||||
num_sentences: corpusData.s_attrs.text.lexicon[i].counts.s,
|
||||
num_unique_words: Object.entries(corpusData.s_attrs.text.lexicon[i].freqs.word).length,
|
||||
num_unique_lemmas: Object.entries(corpusData.s_attrs.text.lexicon[i].freqs.lemma).length,
|
||||
num_unique_pos: Object.entries(corpusData.s_attrs.text.lexicon[i].freqs.pos).length,
|
||||
@ -128,7 +125,7 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
corpusTextInfoList.add(textData);
|
||||
|
||||
let textCountChipElement = document.querySelector('.text-count-chip');
|
||||
textCountChipElement.innerHTML = `Text count: ${corpusData.s_attrs.text.lexicon.length}`;
|
||||
textCountChipElement.innerHTML = `Text count: ${corpusData.corpus.counts.text}`;
|
||||
}
|
||||
|
||||
renderTextProportionsGraphic() {
|
||||
@ -201,7 +198,7 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
default:
|
||||
graphData = [
|
||||
{
|
||||
values: texts.map(text => text[1].bounds[1] - text[1].bounds[0]),
|
||||
values: texts.map(text => text[1].counts.token),
|
||||
labels: texts.map(text => `${corpusData.values.s_attrs.text[text[0]].title} (${corpusData.values.s_attrs.text[text[0]].publishing_year})`),
|
||||
type: graphtype
|
||||
}
|
||||
@ -213,7 +210,7 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
|
||||
async renderTokenList() {
|
||||
let corpusTokenListElement = document.querySelector('.corpus-token-list');
|
||||
let corpusTokenList = new nopaque.resource_lists.CorpusTokenList(corpusTokenListElement);
|
||||
let corpusTokenList = new CorpusTokenList(corpusTokenListElement);
|
||||
let filteredData = this.filterData();
|
||||
let stopwords = this.data.stopwords;
|
||||
if (this.data.stopwords === undefined) {
|
||||
@ -358,7 +355,7 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
if (stopwordLanguageSelection.children.length === 0) {
|
||||
Object.keys(stopwords).forEach(language => {
|
||||
if (language !== 'user_stopwords') {
|
||||
let optionElement = nopaque.Utils.HTMLToElement(`<option value="${language}" ${language === 'english' ? 'selected' : ''}>${language}</option>`);
|
||||
let optionElement = Utils.HTMLToElement(`<option value="${language}" ${language === 'english' ? 'selected' : ''}>${language}</option>`);
|
||||
stopwordLanguageSelection.appendChild(optionElement);
|
||||
}
|
||||
});
|
||||
@ -367,7 +364,7 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
// Render user stopwords over input field.
|
||||
if (this.data.stopwords['user_stopwords'].length > 0) {
|
||||
for (let word of this.data.stopwords['user_stopwords']) {
|
||||
let chipElement = nopaque.Utils.HTMLToElement(`<div class="chip">${word}<i class="close material-icons">close</i></div>`);
|
||||
let chipElement = Utils.HTMLToElement(`<div class="chip">${word}<i class="close material-icons">close</i></div>`);
|
||||
chipElement.addEventListener('click', (event) => {
|
||||
let removedListItem = event.target.closest('.chip').firstChild.textContent;
|
||||
this.data.stopwords['user_stopwords'] = structuredClone(this.data.stopwords['user_stopwords'].filter(item => item !== removedListItem));
|
||||
@ -433,7 +430,7 @@ nopaque.corpus_analysis.StaticVisualizationExtension = class StaticVisualization
|
||||
let stopwordLanguageChipList = document.querySelector('#stopword-language-chip-list');
|
||||
stopwordLanguageChipList.innerHTML = '';
|
||||
for (let word of stopwords) {
|
||||
let chipElement = nopaque.Utils.HTMLToElement(`<div class="chip">${word}<i class="close material-icons">close</i></div>`);
|
||||
let chipElement = Utils.HTMLToElement(`<div class="chip">${word}<i class="close material-icons">close</i></div>`);
|
||||
chipElement.addEventListener('click', (event) => {
|
||||
let removedListItem = event.target.closest('.chip').firstChild.textContent;
|
||||
this.data.stopwords[language] = structuredClone(this.data.stopwords[language].filter(item => item !== removedListItem));
|
1007
app/static/js/CorpusAnalysis/QueryBuilder.js
Normal file
18
app/static/js/Forms/CreateContributionForm.js
Normal file
@ -0,0 +1,18 @@
|
||||
class CreateContributionForm extends Form {
|
||||
static autoInit() {
|
||||
let createContributionFormElements = document.querySelectorAll('.create-contribution-form');
|
||||
for (let createContributionFormElement of createContributionFormElements) {
|
||||
new CreateContributionForm(createContributionFormElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(formElement) {
|
||||
super(formElement);
|
||||
|
||||
this.addEventListener('requestLoad', (event) => {
|
||||
if (event.target.status === 201) {
|
||||
window.location.href = event.target.getResponseHeader('Location');
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
18
app/static/js/Forms/CreateCorpusFileForm.js
Normal file
@ -0,0 +1,18 @@
|
||||
class CreateCorpusFileForm extends Form {
|
||||
static autoInit() {
|
||||
let createCorpusFileFormElements = document.querySelectorAll('.create-corpus-file-form');
|
||||
for (let createCorpusFileFormElement of createCorpusFileFormElements) {
|
||||
new CreateCorpusFileForm(createCorpusFileFormElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(formElement) {
|
||||
super(formElement);
|
||||
|
||||
this.addEventListener('requestLoad', (event) => {
|
||||
if (event.target.status === 201) {
|
||||
window.location.href = event.target.getResponseHeader('Location');
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -1,5 +1,10 @@
|
||||
nopaque.forms.CreateJobForm = class CreateJobForm extends nopaque.forms.BaseForm {
|
||||
static htmlClass = 'create-job-form';
|
||||
class CreateJobForm extends Form {
|
||||
static autoInit() {
|
||||
let createJobFormElements = document.querySelectorAll('.create-job-form');
|
||||
for (let createJobFormElement of createJobFormElements) {
|
||||
new CreateJobForm(createJobFormElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(formElement) {
|
||||
super(formElement);
|
||||
@ -17,4 +22,4 @@ nopaque.forms.CreateJobForm = class CreateJobForm extends nopaque.forms.BaseForm
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.forms.BaseForm = class BaseForm {
|
||||
static htmlClass;
|
||||
class Form {
|
||||
static autoInit() {
|
||||
CreateContributionForm.autoInit();
|
||||
CreateCorpusFileForm.autoInit();
|
||||
CreateJobForm.autoInit();
|
||||
}
|
||||
|
||||
constructor(formElement) {
|
||||
this.formElement = formElement;
|
||||
@ -28,7 +32,7 @@ nopaque.forms.BaseForm = class BaseForm {
|
||||
|
||||
submit(event) {
|
||||
let request = new XMLHttpRequest();
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -67,7 +71,7 @@ nopaque.forms.BaseForm = class BaseForm {
|
||||
for (let selectElement of this.formElement.querySelectorAll('select')) {
|
||||
if (selectElement.value === '') {
|
||||
let inputFieldElement = selectElement.closest('.input-field');
|
||||
let errorHelperTextElement = nopaque.Utils.HTMLToElement(
|
||||
let errorHelperTextElement = Utils.HTMLToElement(
|
||||
'<span class="helper-text error-color-text" data-helper-text-type="error">Please select an option.</span>'
|
||||
);
|
||||
inputFieldElement.appendChild(errorHelperTextElement);
|
||||
@ -93,7 +97,7 @@ nopaque.forms.BaseForm = class BaseForm {
|
||||
.querySelector(`input[name$="${inputName}"], select[name$="${inputName}"]`)
|
||||
.closest('.input-field');
|
||||
for (let inputError of inputErrors) {
|
||||
let errorHelperTextElement = nopaque.Utils.HTMLToElement(
|
||||
let errorHelperTextElement = Utils.HTMLToElement(
|
||||
`<span class="helper-text error-color-text" data-helper-type="error">${inputError}</span>`
|
||||
);
|
||||
inputFieldElement.appendChild(errorHelperTextElement);
|
||||
@ -135,4 +139,4 @@ nopaque.forms.BaseForm = class BaseForm {
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
nopaque.requests = {};
|
||||
let Requests = {};
|
||||
|
||||
nopaque.requests.JSONfetch = (input, init={}) => {
|
||||
Requests.JSONfetch = (input, init={}) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let fixedInit = {};
|
||||
fixedInit.headers = {};
|
||||
@ -8,7 +8,7 @@ nopaque.requests.JSONfetch = (input, init={}) => {
|
||||
if (init.hasOwnProperty('body')) {
|
||||
fixedInit.headers['Content-Type'] = 'application/json';
|
||||
}
|
||||
fetch(input, nopaque.Utils.mergeObjectsDeep(init, fixedInit))
|
||||
fetch(input, Utils.mergeObjectsDeep(init, fixedInit))
|
||||
.then(
|
||||
(response) => {
|
||||
if (response.ok) {
|
20
app/static/js/Requests/admin/admin.js
Normal file
@ -0,0 +1,20 @@
|
||||
/*****************************************************************************
|
||||
* Admin *
|
||||
* Fetch requests for /admin routes *
|
||||
*****************************************************************************/
|
||||
Requests.admin = {};
|
||||
|
||||
Requests.admin.users = {};
|
||||
|
||||
Requests.admin.users.entity = {};
|
||||
|
||||
Requests.admin.users.entity.confirmed = {};
|
||||
|
||||
Requests.admin.users.entity.confirmed.update = (userId, value) => {
|
||||
let input = `/admin/users/${userId}/confirmed`;
|
||||
let init = {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(value)
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
5
app/static/js/Requests/contributions/contributions.js
Normal file
@ -0,0 +1,5 @@
|
||||
/*****************************************************************************
|
||||
* Contributions *
|
||||
* Fetch requests for /contributions routes *
|
||||
*****************************************************************************/
|
||||
Requests.contributions = {};
|
@ -0,0 +1,26 @@
|
||||
/*****************************************************************************
|
||||
* SpaCy NLP Pipeline Models *
|
||||
* Fetch requests for /contributions/spacy-nlp-pipeline-models routes *
|
||||
*****************************************************************************/
|
||||
Requests.contributions.spacy_nlp_pipeline_models = {};
|
||||
|
||||
Requests.contributions.spacy_nlp_pipeline_models.entity = {};
|
||||
|
||||
Requests.contributions.spacy_nlp_pipeline_models.entity.delete = (spacyNlpPipelineModelId) => {
|
||||
let input = `/contributions/spacy-nlp-pipeline-models/${spacyNlpPipelineModelId}`;
|
||||
let init = {
|
||||
method: 'DELETE'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.contributions.spacy_nlp_pipeline_models.entity.isPublic = {};
|
||||
|
||||
Requests.contributions.spacy_nlp_pipeline_models.entity.isPublic.update = (spacyNlpPipelineModelId, value) => {
|
||||
let input = `/contributions/spacy-nlp-pipeline-models/${spacyNlpPipelineModelId}/is_public`;
|
||||
let init = {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(value)
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
@ -0,0 +1,26 @@
|
||||
/*****************************************************************************
|
||||
* Tesseract OCR Pipeline Models *
|
||||
* Fetch requests for /contributions/tesseract-ocr-pipeline-models routes *
|
||||
*****************************************************************************/
|
||||
Requests.contributions.tesseract_ocr_pipeline_models = {};
|
||||
|
||||
Requests.contributions.tesseract_ocr_pipeline_models.entity = {};
|
||||
|
||||
Requests.contributions.tesseract_ocr_pipeline_models.entity.delete = (tesseractOcrPipelineModelId) => {
|
||||
let input = `/contributions/tesseract-ocr-pipeline-models/${tesseractOcrPipelineModelId}`;
|
||||
let init = {
|
||||
method: 'DELETE'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.contributions.tesseract_ocr_pipeline_models.entity.isPublic = {};
|
||||
|
||||
Requests.contributions.tesseract_ocr_pipeline_models.entity.isPublic.update = (tesseractOcrPipelineModelId, value) => {
|
||||
let input = `/contributions/tesseract-ocr-pipeline-models/${tesseractOcrPipelineModelId}/is_public`;
|
||||
let init = {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(value)
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
53
app/static/js/Requests/corpora/corpora.js
Normal file
@ -0,0 +1,53 @@
|
||||
/*****************************************************************************
|
||||
* Corpora *
|
||||
* Fetch requests for /corpora routes *
|
||||
*****************************************************************************/
|
||||
Requests.corpora = {};
|
||||
|
||||
Requests.corpora.entity = {};
|
||||
|
||||
Requests.corpora.entity.delete = (corpusId) => {
|
||||
let input = `/corpora/${corpusId}`;
|
||||
let init = {
|
||||
method: 'DELETE'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.corpora.entity.build = (corpusId) => {
|
||||
let input = `/corpora/${corpusId}/build`;
|
||||
let init = {
|
||||
method: 'POST',
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.corpora.entity.generateShareLink = (corpusId, role, expiration) => {
|
||||
let input = `/corpora/${corpusId}/generate-share-link`;
|
||||
let init = {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({role: role, expiration: expiration})
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.corpora.entity.getStopwords = () => {
|
||||
let input = `/corpora/stopwords`;
|
||||
let init = {
|
||||
method: 'GET'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.corpora.entity.isPublic = {};
|
||||
|
||||
Requests.corpora.entity.isPublic.update = (corpusId, isPublic) => {
|
||||
let input = `/corpora/${corpusId}/is_public`;
|
||||
let init = {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(isPublic)
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
|
15
app/static/js/Requests/corpora/files.js
Normal file
@ -0,0 +1,15 @@
|
||||
/*****************************************************************************
|
||||
* Corpora *
|
||||
* Fetch requests for /corpora/<entity>/files routes *
|
||||
*****************************************************************************/
|
||||
Requests.corpora.entity.files = {};
|
||||
|
||||
Requests.corpora.entity.files.ent = {};
|
||||
|
||||
Requests.corpora.entity.files.ent.delete = (corpusId, corpusFileId) => {
|
||||
let input = `/corpora/${corpusId}/files/${corpusFileId}`;
|
||||
let init = {
|
||||
method: 'DELETE',
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
35
app/static/js/Requests/corpora/followers.js
Normal file
@ -0,0 +1,35 @@
|
||||
/*****************************************************************************
|
||||
* Corpora *
|
||||
* Fetch requests for /corpora/<entity>/followers routes *
|
||||
*****************************************************************************/
|
||||
Requests.corpora.entity.followers = {};
|
||||
|
||||
Requests.corpora.entity.followers.add = (corpusId, usernames) => {
|
||||
let input = `/corpora/${corpusId}/followers`;
|
||||
let init = {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(usernames)
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.corpora.entity.followers.entity = {};
|
||||
|
||||
Requests.corpora.entity.followers.entity.delete = (corpusId, followerId) => {
|
||||
let input = `/corpora/${corpusId}/followers/${followerId}`;
|
||||
let init = {
|
||||
method: 'DELETE',
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.corpora.entity.followers.entity.role = {};
|
||||
|
||||
Requests.corpora.entity.followers.entity.role.update = (corpusId, followerId, value) => {
|
||||
let input = `/corpora/${corpusId}/followers/${followerId}/role`;
|
||||
let init = {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(value)
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
31
app/static/js/Requests/jobs/jobs.js
Normal file
@ -0,0 +1,31 @@
|
||||
/*****************************************************************************
|
||||
* Jobs *
|
||||
* Fetch requests for /jobs routes *
|
||||
*****************************************************************************/
|
||||
Requests.jobs = {};
|
||||
|
||||
Requests.jobs.entity = {};
|
||||
|
||||
Requests.jobs.entity.delete = (jobId) => {
|
||||
let input = `/jobs/${jobId}`;
|
||||
let init = {
|
||||
method: 'DELETE'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
}
|
||||
|
||||
Requests.jobs.entity.log = (jobId) => {
|
||||
let input = `/jobs/${jobId}/log`;
|
||||
let init = {
|
||||
method: 'GET'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
}
|
||||
|
||||
Requests.jobs.entity.restart = (jobId) => {
|
||||
let input = `/jobs/${jobId}/restart`;
|
||||
let init = {
|
||||
method: 'POST'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
}
|
17
app/static/js/Requests/users/settings.js
Normal file
@ -0,0 +1,17 @@
|
||||
/*****************************************************************************
|
||||
* Settings *
|
||||
* Fetch requests for /users/<entity>/settings routes *
|
||||
*****************************************************************************/
|
||||
Requests.users.entity.settings = {};
|
||||
|
||||
Requests.users.entity.settings.profilePrivacy = {};
|
||||
|
||||
Requests.users.entity.settings.profilePrivacy.update = (userId, profilePrivacySetting, enabled) => {
|
||||
let input = `/users/${userId}/settings/profile-privacy/${profilePrivacySetting}`;
|
||||
let init = {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(enabled)
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
35
app/static/js/Requests/users/users.js
Normal file
@ -0,0 +1,35 @@
|
||||
/*****************************************************************************
|
||||
* Users *
|
||||
* Fetch requests for /users routes *
|
||||
*****************************************************************************/
|
||||
Requests.users = {};
|
||||
|
||||
Requests.users.entity = {};
|
||||
|
||||
Requests.users.entity.delete = (userId) => {
|
||||
let input = `/users/${userId}`;
|
||||
let init = {
|
||||
method: 'DELETE'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
Requests.users.entity.acceptTermsOfUse = () => {
|
||||
let input = `/users/accept-terms-of-use`;
|
||||
let init = {
|
||||
method: 'POST'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
};
|
||||
|
||||
|
||||
Requests.users.entity.avatar = {};
|
||||
|
||||
Requests.users.entity.avatar.delete = (userId) => {
|
||||
let input = `/users/${userId}/avatar`;
|
||||
let init = {
|
||||
method: 'DELETE'
|
||||
};
|
||||
return Requests.JSONfetch(input, init);
|
||||
}
|
||||
|
@ -1,13 +1,11 @@
|
||||
nopaque.resource_displays.CorpusDisplay = class CorpusDisplay extends nopaque.resource_displays.ResourceDisplay {
|
||||
static htmlClass = 'corpus-display';
|
||||
|
||||
class CorpusDisplay extends ResourceDisplay {
|
||||
constructor(displayElement) {
|
||||
super(displayElement);
|
||||
this.corpusId = displayElement.dataset.corpusId;
|
||||
this.displayElement
|
||||
.querySelector('.action-button[data-action="build-request"]')
|
||||
.addEventListener('click', (event) => {
|
||||
nopaque.requests.corpora.entity.build(this.corpusId);
|
||||
Requests.corpora.entity.build(this.corpusId);
|
||||
});
|
||||
}
|
||||
|
||||
@ -104,4 +102,4 @@ nopaque.resource_displays.CorpusDisplay = class CorpusDisplay extends nopaque.re
|
||||
new Date(creationDate).toLocaleString("en-US")
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
@ -1,6 +1,4 @@
|
||||
nopaque.resource_displays.JobDisplay = class JobDisplay extends nopaque.resource_displays.ResourceDisplay {
|
||||
static htmlClass = 'job-display';
|
||||
|
||||
class JobDisplay extends ResourceDisplay {
|
||||
constructor(displayElement) {
|
||||
super(displayElement);
|
||||
this.jobId = this.displayElement.dataset.jobId;
|
||||
@ -125,4 +123,4 @@ nopaque.resource_displays.JobDisplay = class JobDisplay extends nopaque.resource
|
||||
setServiceVersion(serviceVersion) {
|
||||
this.setElements(this.displayElement.querySelectorAll('.job-service-version'), serviceVersion);
|
||||
}
|
||||
};
|
||||
}
|
@ -1,6 +1,4 @@
|
||||
nopaque.resource_displays.ResourceDisplay = class ResourceDisplay {
|
||||
static htmlClass;
|
||||
|
||||
class ResourceDisplay {
|
||||
constructor(displayElement) {
|
||||
this.displayElement = displayElement;
|
||||
this.userId = this.displayElement.dataset.userId;
|
||||
@ -43,4 +41,4 @@ nopaque.resource_displays.ResourceDisplay = class ResourceDisplay {
|
||||
this.setElement(element, value);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.AdminUserList = class AdminUserList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'admin-user-list';
|
||||
class AdminUserList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let adminUserListElement of document.querySelectorAll('.admin-user-list:not(.no-autoinit)')) {
|
||||
new AdminUserList(adminUserListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -37,9 +41,9 @@ nopaque.resource_lists.AdminUserList = class AdminUserList extends nopaque.resou
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('user-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('user-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -87,7 +91,7 @@ nopaque.resource_lists.AdminUserList = class AdminUserList extends nopaque.resou
|
||||
let listAction = listActionElement === null ? 'view' : listActionElement.dataset.listAction;
|
||||
switch (listAction) {
|
||||
case 'delete': {
|
||||
nopaque.requests.users.entity.delete(itemId);
|
||||
Requests.users.entity.delete(itemId);
|
||||
if (itemId === currentUserId) {window.location.href = '/';}
|
||||
break;
|
||||
}
|
||||
@ -104,4 +108,4 @@ nopaque.resource_lists.AdminUserList = class AdminUserList extends nopaque.resou
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'corpus-file-list';
|
||||
class CorpusFileList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let corpusFileListElement of document.querySelectorAll('.corpus-file-list:not(.no-autoinit)')) {
|
||||
new CorpusFileList(corpusFileListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -62,9 +66,9 @@ nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.res
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('corpus-file-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('corpus-file-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -121,7 +125,7 @@ nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.res
|
||||
switch (listAction) {
|
||||
case 'delete': {
|
||||
let values = this.listjs.get('id', itemId)[0].values();
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -149,12 +153,12 @@ nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.res
|
||||
let confirmElement = modalElement.querySelector('.action-button[data-action="confirm"]');
|
||||
confirmElement.addEventListener('click', (event) => {
|
||||
if (currentUserId != this.userId) {
|
||||
nopaque.requests.corpora.entity.files.ent.delete(this.corpusId, itemId)
|
||||
Requests.corpora.entity.files.ent.delete(this.corpusId, itemId)
|
||||
.then(() => {
|
||||
window.location.reload();
|
||||
});
|
||||
} else {
|
||||
nopaque.requests.corpora.entity.files.ent.delete(this.corpusId, itemId)
|
||||
Requests.corpora.entity.files.ent.delete(this.corpusId, itemId)
|
||||
}
|
||||
});
|
||||
modal.open();
|
||||
@ -208,7 +212,7 @@ nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.res
|
||||
break;
|
||||
}
|
||||
case 'delete': {
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -229,7 +233,7 @@ nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.res
|
||||
this.selectedItemIds.forEach(selectedItemId => {
|
||||
let listItem = this.listjs.get('id', selectedItemId)[0].elm;
|
||||
let values = this.listjs.get('id', listItem.dataset.id)[0].values();
|
||||
let itemElement = nopaque.Utils.HTMLToElement(`<li> - ${values.title}</li>`);
|
||||
let itemElement = Utils.HTMLToElement(`<li> - ${values.title}</li>`);
|
||||
itemList.appendChild(itemElement);
|
||||
});
|
||||
let modal = M.Modal.init(
|
||||
@ -246,12 +250,12 @@ nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.res
|
||||
confirmElement.addEventListener('click', (event) => {
|
||||
this.selectedItemIds.forEach(selectedItemId => {
|
||||
if (currentUserId != this.userId) {
|
||||
nopaque.requests.corpora.entity.files.ent.delete(this.corpusId, selectedItemId)
|
||||
Requests.corpora.entity.files.ent.delete(this.corpusId, selectedItemId)
|
||||
.then(() => {
|
||||
window.location.reload();
|
||||
});
|
||||
} else {
|
||||
nopaque.requests.corpora.entity.files.ent.delete(this.corpusId, selectedItemId);
|
||||
Requests.corpora.entity.files.ent.delete(this.corpusId, selectedItemId);
|
||||
}
|
||||
});
|
||||
this.selectedItemIds.clear();
|
||||
@ -365,4 +369,4 @@ nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.res
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.CorpusFollowerList = class CorpusFollowerList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'corpus-follower-list';
|
||||
class CorpusFollowerList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let corpusFollowerListElement of document.querySelectorAll('.corpus-follower-list:not(.no-autoinit)')) {
|
||||
new CorpusFollowerList(corpusFollowerListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -18,7 +22,7 @@ nopaque.resource_lists.CorpusFollowerList = class CorpusFollowerList extends nop
|
||||
});
|
||||
});
|
||||
app.getUser(this.userId).then((user) => {
|
||||
// let corpusFollowerAssociations = Object.values(user.corpora[this.corpusId].corpus_follower_associations);
|
||||
let corpusFollowerAssociations = Object.values(user.corpora[this.corpusId].corpus_follower_associations);
|
||||
// let filteredList = corpusFollowerAssociations.filter(association => association.follower.id != currentUserId);
|
||||
// this.add(filteredList);
|
||||
this.add(Object.values(user.corpora[this.corpusId].corpus_follower_associations));
|
||||
@ -68,9 +72,9 @@ nopaque.resource_lists.CorpusFollowerList = class CorpusFollowerList extends nop
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('corpus-follower-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('corpus-follower-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -120,7 +124,7 @@ nopaque.resource_lists.CorpusFollowerList = class CorpusFollowerList extends nop
|
||||
case 'update-role': {
|
||||
let followerId = listItemElement.dataset.followerId;
|
||||
let roleName = event.target.value;
|
||||
nopaque.requests.corpora.entity.followers.entity.role.update(this.corpusId, followerId, roleName);
|
||||
Requests.corpora.entity.followers.entity.role.update(this.corpusId, followerId, roleName);
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
@ -140,12 +144,12 @@ nopaque.resource_lists.CorpusFollowerList = class CorpusFollowerList extends nop
|
||||
case 'unfollow-request': {
|
||||
let followerId = listItemElement.dataset.followerId;
|
||||
if (currentUserId != this.userId) {
|
||||
nopaque.requests.corpora.entity.followers.entity.delete(this.corpusId, followerId)
|
||||
Requests.corpora.entity.followers.entity.delete(this.corpusId, followerId)
|
||||
.then(() => {
|
||||
window.location.reload();
|
||||
});
|
||||
} else {
|
||||
nopaque.requests.corpora.entity.followers.entity.delete(this.corpusId, followerId);
|
||||
Requests.corpora.entity.followers.entity.delete(this.corpusId, followerId);
|
||||
}
|
||||
break;
|
||||
}
|
||||
@ -192,4 +196,4 @@ nopaque.resource_lists.CorpusFollowerList = class CorpusFollowerList extends nop
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'corpus-list';
|
||||
class CorpusList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let corpusListElement of document.querySelectorAll('.corpus-list:not(.no-autoinit)')) {
|
||||
new CorpusList(corpusListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -93,9 +97,9 @@ nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_li
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('corpus-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('corpus-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -139,7 +143,7 @@ nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_li
|
||||
switch (listAction) {
|
||||
case 'delete-request': {
|
||||
let values = this.listjs.get('id', itemId)[0].values();
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -167,12 +171,12 @@ nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_li
|
||||
let confirmElement = modalElement.querySelector('.action-button[data-action="confirm"]');
|
||||
confirmElement.addEventListener('click', (event) => {
|
||||
if (!values['is-owner']) {
|
||||
nopaque.requests.corpora.entity.followers.entity.delete(itemId, currentUserId)
|
||||
Requests.corpora.entity.followers.entity.delete(itemId, currentUserId)
|
||||
.then((response) => {
|
||||
window.location.reload();
|
||||
});
|
||||
} else {
|
||||
nopaque.requests.corpora.entity.delete(itemId);
|
||||
Requests.corpora.entity.delete(itemId);
|
||||
}
|
||||
});
|
||||
modal.open();
|
||||
@ -224,7 +228,7 @@ nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_li
|
||||
// Saved for future use:
|
||||
// <p class="hide">Do you really want to unfollow this Corpora?</p>
|
||||
// <ul id="selected-unfollow-items-list"></ul>
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -245,7 +249,7 @@ nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_li
|
||||
this.selectedItemIds.forEach(selectedItemId => {
|
||||
let listItem = this.listjs.get('id', selectedItemId)[0].elm;
|
||||
let values = this.listjs.get('id', listItem.dataset.id)[0].values();
|
||||
let itemElement = nopaque.Utils.HTMLToElement(`<li> - ${values.title}</li>`);
|
||||
let itemElement = Utils.HTMLToElement(`<li> - ${values.title}</li>`);
|
||||
// if (!values['is-owner']) {
|
||||
// itemUnfollowList.appendChild(itemElement);
|
||||
// } else {
|
||||
@ -268,9 +272,9 @@ nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_li
|
||||
let listItem = this.listjs.get('id', selectedItemId)[0].elm;
|
||||
let values = this.listjs.get('id', listItem.dataset.id)[0].values();
|
||||
if (values['is-owner']) {
|
||||
nopaque.requests.corpora.entity.delete(selectedItemId);
|
||||
Requests.corpora.entity.delete(selectedItemId);
|
||||
} else {
|
||||
nopaque.requests.corpora.entity.followers.entity.delete(selectedItemId, currentUserId);
|
||||
Requests.corpora.entity.followers.entity.delete(selectedItemId, currentUserId);
|
||||
setTimeout(() => {
|
||||
window.location.reload();
|
||||
}, 1000);
|
||||
@ -366,4 +370,4 @@ nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_li
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,13 +1,18 @@
|
||||
nopaque.resource_lists.CorpusTextInfoList = class CorpusTextInfoList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'corpus-text-info-list';
|
||||
class CorpusTextInfoList extends ResourceList {
|
||||
|
||||
static autoInit() {
|
||||
for (let corpusTextInfoListElement of document.querySelectorAll('.corpus-text-info-list:not(.no-autoinit)')) {
|
||||
new CorpusTextInfoList(corpusTextInfoListElement);
|
||||
}
|
||||
}
|
||||
|
||||
static defaultOptions = {
|
||||
page: 5
|
||||
};
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
let _options = nopaque.Utils.mergeObjectsDeep(
|
||||
nopaque.resource_lists.CorpusTextInfoList.defaultOptions,
|
||||
let _options = Utils.mergeObjectsDeep(
|
||||
CorpusTextInfoList.defaultOptions,
|
||||
options
|
||||
);
|
||||
super(listContainerElement, _options);
|
||||
@ -21,7 +26,7 @@ nopaque.resource_lists.CorpusTextInfoList = class CorpusTextInfoList extends nop
|
||||
get item() {
|
||||
return (values) => {
|
||||
return `
|
||||
<tr class="list-item hoverable">
|
||||
<tr class="list-item clickable hoverable">
|
||||
<td><span class="title"></span> (<span class="publishing_year"></span>)</td>
|
||||
<td><span class="num_tokens"></span></td>
|
||||
<td><span class="num_sentences"></span></td>
|
||||
@ -49,9 +54,9 @@ nopaque.resource_lists.CorpusTextInfoList = class CorpusTextInfoList extends nop
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('corpus-file-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('corpus-file-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -104,4 +109,4 @@ nopaque.resource_lists.CorpusTextInfoList = class CorpusTextInfoList extends nop
|
||||
clickedSortElement.style.color = '#aa9cc9';
|
||||
clickedSortElement.innerHTML = clickedSortElement.classList.contains('asc') ? 'arrow_drop_down' : 'arrow_drop_up';
|
||||
}
|
||||
};
|
||||
}
|
@ -1,13 +1,17 @@
|
||||
nopaque.resource_lists.CorpusTokenList = class CorpusTokenList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'corpus-token-list';
|
||||
class CorpusTokenList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let corpusTokenListElement of document.querySelectorAll('.corpus-token-list:not(.no-autoinit)')) {
|
||||
new CorpusTokenList(corpusTokenListElement);
|
||||
}
|
||||
}
|
||||
|
||||
static defaultOptions = {
|
||||
page: 7
|
||||
};
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
let _options = nopaque.Utils.mergeObjectsDeep(
|
||||
nopaque.resource_lists.CorpusTokenList.defaultOptions,
|
||||
let _options = Utils.mergeObjectsDeep(
|
||||
CorpusTokenList.defaultOptions,
|
||||
options
|
||||
);
|
||||
super(listContainerElement, _options);
|
||||
@ -72,9 +76,9 @@ nopaque.resource_lists.CorpusTokenList = class CorpusTokenList extends nopaque.r
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('corpus-token-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('corpus-token-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -134,4 +138,4 @@ nopaque.resource_lists.CorpusTokenList = class CorpusTokenList extends nopaque.r
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
}
|
@ -1,6 +1,4 @@
|
||||
nopaque.resource_lists.DetailedPublicCorpusList = class DetailedPublicCorpusList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'detailed-public-corpus-list';
|
||||
|
||||
class DetailledPublicCorpusList extends CorpusList {
|
||||
get item() {
|
||||
return (values) => {
|
||||
return `
|
||||
@ -32,9 +30,9 @@ nopaque.resource_lists.DetailedPublicCorpusList = class DetailedPublicCorpusList
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('corpus-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('corpus-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -70,4 +68,4 @@ nopaque.resource_lists.DetailedPublicCorpusList = class DetailedPublicCorpusList
|
||||
'current-user-is-following': Object.values(corpus.corpus_follower_associations).some(association => association.follower.id === currentUserId)
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.JobInputList = class JobInputList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'job-input-list';
|
||||
class JobInputList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let jobInputListElement of document.querySelectorAll('.job-input-list:not(.no-autoinit)')) {
|
||||
new JobInputList(jobInputListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -36,9 +40,9 @@ nopaque.resource_lists.JobInputList = class JobInputList extends nopaque.resourc
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('job-input-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('job-input-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -86,4 +90,4 @@ nopaque.resource_lists.JobInputList = class JobInputList extends nopaque.resourc
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'job-list';
|
||||
class JobList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let jobListElement of document.querySelectorAll('.job-list:not(.no-autoinit)')) {
|
||||
new JobList(jobListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -25,7 +29,7 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
|
||||
|
||||
get item() {
|
||||
return `
|
||||
<tr class="list-item service-scheme clickable hoverable">
|
||||
<tr class="list-item service-scheme">
|
||||
<td>
|
||||
<label class="list-action-trigger" data-list-action="select">
|
||||
<input class="select-checkbox" type="checkbox">
|
||||
@ -56,9 +60,9 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('job-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('job-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -108,11 +112,11 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
|
||||
if (listItemElement === null) {return;}
|
||||
let itemId = listItemElement.dataset.id;
|
||||
let listActionElement = event.target.closest('.list-action-trigger[data-list-action]');
|
||||
let listAction = listActionElement === null ? 'view' : listActionElement.dataset.listAction;
|
||||
let listAction = listActionElement === null ? '' : listActionElement.dataset.listAction;
|
||||
switch (listAction) {
|
||||
case 'delete-request': {
|
||||
let values = this.listjs.get('id', itemId)[0].values();
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -139,7 +143,7 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
|
||||
);
|
||||
let confirmElement = modalElement.querySelector('.action-button[data-action="confirm"]');
|
||||
confirmElement.addEventListener('click', (event) => {
|
||||
nopaque.requests.jobs.entity.delete(itemId);
|
||||
Requests.jobs.entity.delete(itemId);
|
||||
});
|
||||
modal.open();
|
||||
break;
|
||||
@ -187,7 +191,7 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
|
||||
break;
|
||||
}
|
||||
case 'delete': {
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -208,7 +212,7 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
|
||||
this.selectedItemIds.forEach(selectedItemId => {
|
||||
let listItem = this.listjs.get('id', selectedItemId)[0].elm;
|
||||
let values = this.listjs.get('id', listItem.dataset.id)[0].values();
|
||||
let itemElement = nopaque.Utils.HTMLToElement(`<li> - ${values.title}</li>`);
|
||||
let itemElement = Utils.HTMLToElement(`<li> - ${values.title}</li>`);
|
||||
itemList.appendChild(itemElement);
|
||||
});
|
||||
let modal = M.Modal.init(
|
||||
@ -224,7 +228,7 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
|
||||
let confirmElement = modalElement.querySelector('.action-button[data-action="confirm"]');
|
||||
confirmElement.addEventListener('click', (event) => {
|
||||
this.selectedItemIds.forEach(selectedItemId => {
|
||||
nopaque.requests.jobs.entity.delete(selectedItemId);
|
||||
Requests.jobs.entity.delete(selectedItemId);
|
||||
});
|
||||
this.selectedItemIds.clear();
|
||||
this.renderingItemSelection();
|
||||
@ -319,4 +323,4 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.JobResultList = class JobResultList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'job-result-list';
|
||||
class JobResultList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let jobResultListElement of document.querySelectorAll('.job-result-list:not(.no-autoinit)')) {
|
||||
new JobResultList(jobResultListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -42,9 +46,9 @@ nopaque.resource_lists.JobResultList = class JobResultList extends nopaque.resou
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('job-result-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('job-result-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -111,4 +115,4 @@ nopaque.resource_lists.JobResultList = class JobResultList extends nopaque.resou
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,11 +1,4 @@
|
||||
nopaque.resource_lists.PublicCorpusList = class PublicCorpusList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'public-corpus-list';
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
this.listjs.list.addEventListener('click', (event) => {this.onClick(event)});
|
||||
}
|
||||
|
||||
class PublicCorpusList extends CorpusList {
|
||||
get item() {
|
||||
return (values) => {
|
||||
return `
|
||||
@ -21,19 +14,6 @@ nopaque.resource_lists.PublicCorpusList = class PublicCorpusList extends nopaque
|
||||
};
|
||||
}
|
||||
|
||||
get valueNames() {
|
||||
return [
|
||||
{data: ['id']},
|
||||
{data: ['creation-date']},
|
||||
{name: 'status', attr: 'data-status'},
|
||||
'description',
|
||||
'title',
|
||||
'owner',
|
||||
'is-owner',
|
||||
'current-user-is-following'
|
||||
];
|
||||
}
|
||||
|
||||
mapResourceToValue(corpus) {
|
||||
return {
|
||||
'id': corpus.id,
|
||||
@ -49,9 +29,9 @@ nopaque.resource_lists.PublicCorpusList = class PublicCorpusList extends nopaque
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('corpus-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('corpus-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -72,21 +52,4 @@ nopaque.resource_lists.PublicCorpusList = class PublicCorpusList extends nopaque
|
||||
<ul class="pagination"></ul>
|
||||
`.trim();
|
||||
}
|
||||
|
||||
onClick(event) {
|
||||
let listItemElement = event.target.closest('.list-item[data-id]');
|
||||
if (listItemElement === null) {return;}
|
||||
let itemId = listItemElement.dataset.id;
|
||||
let listActionElement = event.target.closest('.list-action-trigger[data-list-action]');
|
||||
let listAction = listActionElement === null ? 'view' : listActionElement.dataset.listAction;
|
||||
switch (listAction) {
|
||||
case 'view': {
|
||||
window.location.href = `/corpora/${itemId}`;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,10 +1,23 @@
|
||||
nopaque.resource_lists.ResourceList = class ResourceList {
|
||||
class ResourceList {
|
||||
/* A wrapper class for the list.js list.
|
||||
* This class is not meant to be used directly, instead it should be used as
|
||||
* a base class for concrete resource list implementations.
|
||||
*/
|
||||
|
||||
static htmlClass;
|
||||
static autoInit() {
|
||||
CorpusList.autoInit();
|
||||
CorpusFileList.autoInit();
|
||||
JobList.autoInit();
|
||||
JobInputList.autoInit();
|
||||
JobResultList.autoInit();
|
||||
SpaCyNLPPipelineModelList.autoInit();
|
||||
TesseractOCRPipelineModelList.autoInit();
|
||||
UserList.autoInit();
|
||||
AdminUserList.autoInit();
|
||||
CorpusFollowerList.autoInit();
|
||||
CorpusTextInfoList.autoInit();
|
||||
CorpusTokenList.autoInit();
|
||||
}
|
||||
|
||||
static defaultOptions = {
|
||||
page: 5,
|
||||
@ -21,9 +34,9 @@ nopaque.resource_lists.ResourceList = class ResourceList {
|
||||
if ('valueNames' in options) {
|
||||
throw '"valueNames" is not supported as an option, define it as a getter in the list class';
|
||||
}
|
||||
let _options = nopaque.Utils.mergeObjectsDeep(
|
||||
let _options = Utils.mergeObjectsDeep(
|
||||
{item: this.item, valueNames: this.valueNames},
|
||||
nopaque.resource_lists.ResourceList.defaultOptions,
|
||||
ResourceList.defaultOptions,
|
||||
options
|
||||
);
|
||||
this.listContainerElement = listContainerElement;
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.SpaCyNLPPipelineModelList = class SpaCyNLPPipelineModelList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'spacy-nlp-pipeline-model-list';
|
||||
class SpaCyNLPPipelineModelList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let spaCyNLPPipelineModelListElement of document.querySelectorAll('.spacy-nlp-pipeline-model-list:not(.no-autoinit)')) {
|
||||
new SpaCyNLPPipelineModelList(spaCyNLPPipelineModelListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -60,9 +64,9 @@ nopaque.resource_lists.SpaCyNLPPipelineModelList = class SpaCyNLPPipelineModelLi
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('spacy-nlp-pipeline-model-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('spacy-nlp-pipeline-model-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -116,7 +120,7 @@ nopaque.resource_lists.SpaCyNLPPipelineModelList = class SpaCyNLPPipelineModelLi
|
||||
switch (listAction) {
|
||||
case 'toggle-is-public': {
|
||||
let newIsPublicValue = listActionElement.checked;
|
||||
nopaque.requests.contributions.spacy_nlp_pipeline_models.entity.isPublic.update(itemId, newIsPublicValue)
|
||||
Requests.contributions.spacy_nlp_pipeline_models.entity.isPublic.update(itemId, newIsPublicValue)
|
||||
.catch((response) => {
|
||||
listActionElement.checked = !newIsPublicValue;
|
||||
});
|
||||
@ -138,7 +142,7 @@ nopaque.resource_lists.SpaCyNLPPipelineModelList = class SpaCyNLPPipelineModelLi
|
||||
switch (listAction) {
|
||||
case 'delete-request': {
|
||||
let values = this.listjs.get('id', itemId)[0].values();
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -165,7 +169,7 @@ nopaque.resource_lists.SpaCyNLPPipelineModelList = class SpaCyNLPPipelineModelLi
|
||||
);
|
||||
let confirmElement = modalElement.querySelector('.action-button[data-action="confirm"]');
|
||||
confirmElement.addEventListener('click', (event) => {
|
||||
nopaque.requests.contributions.spacy_nlp_pipeline_models.entity.delete(itemId);
|
||||
Requests.contributions.spacy_nlp_pipeline_models.entity.delete(itemId);
|
||||
});
|
||||
modal.open();
|
||||
break;
|
||||
@ -216,4 +220,4 @@ nopaque.resource_lists.SpaCyNLPPipelineModelList = class SpaCyNLPPipelineModelLi
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
@ -1,5 +1,9 @@
|
||||
nopaque.resource_lists.TesseractOCRPipelineModelList = class TesseractOCRPipelineModelList extends nopaque.resource_lists.ResourceList {
|
||||
static htmlClass = 'tesseract-ocr-pipeline-model-list';
|
||||
class TesseractOCRPipelineModelList extends ResourceList {
|
||||
static autoInit() {
|
||||
for (let tesseractOCRPipelineModelListElement of document.querySelectorAll('.tesseract-ocr-pipeline-model-list:not(.no-autoinit)')) {
|
||||
new TesseractOCRPipelineModelList(tesseractOCRPipelineModelListElement);
|
||||
}
|
||||
}
|
||||
|
||||
constructor(listContainerElement, options = {}) {
|
||||
super(listContainerElement, options);
|
||||
@ -69,9 +73,9 @@ nopaque.resource_lists.TesseractOCRPipelineModelList = class TesseractOCRPipelin
|
||||
|
||||
initListContainerElement() {
|
||||
if (!this.listContainerElement.hasAttribute('id')) {
|
||||
this.listContainerElement.id = nopaque.Utils.generateElementId('tesseract-ocr-pipeline-model-list-');
|
||||
this.listContainerElement.id = Utils.generateElementId('tesseract-ocr-pipeline-model-list-');
|
||||
}
|
||||
let listSearchElementId = nopaque.Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
let listSearchElementId = Utils.generateElementId(`${this.listContainerElement.id}-search-`);
|
||||
this.listContainerElement.innerHTML = `
|
||||
<div class="input-field">
|
||||
<i class="material-icons prefix">search</i>
|
||||
@ -125,7 +129,7 @@ nopaque.resource_lists.TesseractOCRPipelineModelList = class TesseractOCRPipelin
|
||||
switch (listAction) {
|
||||
case 'toggle-is-public': {
|
||||
let newIsPublicValue = listActionElement.checked;
|
||||
nopaque.requests.contributions.tesseract_ocr_pipeline_models.entity.isPublic.update(itemId, newIsPublicValue)
|
||||
Requests.contributions.tesseract_ocr_pipeline_models.entity.isPublic.update(itemId, newIsPublicValue)
|
||||
.catch((response) => {
|
||||
listActionElement.checked = !newIsPublicValue;
|
||||
});
|
||||
@ -147,7 +151,7 @@ nopaque.resource_lists.TesseractOCRPipelineModelList = class TesseractOCRPipelin
|
||||
switch (listAction) {
|
||||
case 'delete-request': {
|
||||
let values = this.listjs.get('id', itemId)[0].values();
|
||||
let modalElement = nopaque.Utils.HTMLToElement(
|
||||
let modalElement = Utils.HTMLToElement(
|
||||
`
|
||||
<div class="modal">
|
||||
<div class="modal-content">
|
||||
@ -174,7 +178,7 @@ nopaque.resource_lists.TesseractOCRPipelineModelList = class TesseractOCRPipelin
|
||||
);
|
||||
let confirmElement = modalElement.querySelector('.action-button[data-action="confirm"]');
|
||||
confirmElement.addEventListener('click', (event) => {
|
||||
nopaque.requests.contributions.tesseract_ocr_pipeline_models.entity.delete(itemId);
|
||||
Requests.contributions.tesseract_ocr_pipeline_models.entity.delete(itemId);
|
||||
});
|
||||
modal.open();
|
||||
break;
|
||||
@ -225,4 +229,4 @@ nopaque.resource_lists.TesseractOCRPipelineModelList = class TesseractOCRPipelin
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|