Compare commits

...

11 Commits

Author           SHA1        Message                                          Date
Patrick Jentsch  bd0a9c60f8  strictly use socket.io class based namespaces    2024-11-07 12:12:42 +01:00
Patrick Jentsch  d41ebc6efe  Fix project vscode settings                      2024-11-07 10:51:35 +01:00
Patrick Jentsch  63690222ed  Rename cqi extensions file                       2024-11-07 10:44:27 +01:00
Patrick Jentsch  b4faa1c695  Code enhancements in vrt file normalizer module  2024-11-07 10:40:25 +01:00
Patrick Jentsch  909b130285  Fix wrong import                                 2024-11-07 09:48:40 +01:00
Patrick Jentsch  c223f07289  Codestyle enhancements                           2024-11-07 08:57:32 +01:00
Patrick Jentsch  fcb49025e9  remove unused socketio event handlers            2024-11-07 08:51:49 +01:00
Patrick Jentsch  191d7813a7  prefix extension name with "nopaque_"            2024-11-07 08:35:02 +01:00
Patrick Jentsch  f255fef631  Remove debug print statement                     2024-11-07 08:32:20 +01:00
Patrick Jentsch  76171f306d  Remove debug print statements                    2024-11-07 08:31:52 +01:00
Patrick Jentsch  5ea6d45f46  Reset all corpora on deploy cli command          2024-11-07 08:31:31 +01:00
29 changed files with 145 additions and 306 deletions

View File

@@ -1,7 +1,10 @@
{
"editor.rulers": [79],
"editor.tabSize": 2,
"editor.tabSize": 4,
"files.insertFinalNewline": true,
"files.trimFinalNewlines": true,
"files.trimTrailingWhitespace": true
"files.trimTrailingWhitespace": true,
"[javascript]": {
"editor.tabSize": 2,
}
}

View File

@@ -44,7 +44,7 @@ def deploy():
TesseractOCRPipelineModel.insert_defaults()
print('Stop running analysis sessions')
for corpus in Corpus.query.filter(Corpus.num_analysis_sessions > 0).all():
for corpus in Corpus.query.all():
corpus.num_analysis_sessions = 0
db.session.commit()

View File

@@ -75,9 +75,7 @@ def terms_of_use():
@bp.route('/social-area')
@login_required
def social_area():
print('test')
corpora = Corpus.query.filter(Corpus.is_public == True, Corpus.user != current_user).all()
print(corpora)
users = User.query.filter(User.is_public == True, User.id != current_user.id).all()
return render_template(
'main/social_area.html.j2',

View File

@@ -167,7 +167,6 @@ class CreateSpacyNLPPipelineJobForm(CreateJobBaseForm):
version = kwargs.pop('version', service_manifest['latest_version'])
super().__init__(*args, **kwargs)
service_info = service_manifest['versions'][version]
print(service_info)
if self.encoding_detection.render_kw is None:
self.encoding_detection.render_kw = {}
self.encoding_detection.render_kw['disabled'] = True

View File

@@ -1,47 +0,0 @@
from flask_login import current_user
from flask_socketio import join_room, leave_room
from app import hashids, socketio
from app.decorators import socketio_login_required
from app.models import User
@socketio.on('GET /users/<user_id>')
@socketio_login_required
def get_user(user_hashid):
user_id = hashids.decode(user_hashid)
user = User.query.get(user_id)
if user is None:
return {'status': 404, 'statusText': 'Not found'}
if not (user == current_user or current_user.is_administrator):
return {'status': 403, 'statusText': 'Forbidden'}
return {
'body': user.to_json_serializeable(backrefs=True, relationships=True),
'status': 200,
'statusText': 'OK'
}
@socketio.on('SUBSCRIBE /users/<user_id>')
@socketio_login_required
def subscribe_user(user_hashid):
user_id = hashids.decode(user_hashid)
user = User.query.get(user_id)
if user is None:
return {'status': 404, 'statusText': 'Not found'}
if not (user == current_user or current_user.is_administrator):
return {'status': 403, 'statusText': 'Forbidden'}
join_room(f'/users/{user.hashid}')
return {'status': 200, 'statusText': 'OK'}
@socketio.on('UNSUBSCRIBE /users/<user_id>')
@socketio_login_required
def unsubscribe_user(user_hashid):
user_id = hashids.decode(user_hashid)
user = User.query.get(user_id)
if user is None:
return {'status': 404, 'statusText': 'Not found'}
if not (user == current_user or current_user.is_administrator):
return {'status': 403, 'statusText': 'Forbidden'}
leave_room(f'/users/{user.hashid}')
return {'status': 200, 'statusText': 'OK'}

View File

@@ -1,114 +0,0 @@
from flask_login import current_user
from flask_socketio import join_room
from app import hashids, socketio
from app.decorators import socketio_admin_required, socketio_login_required
from app.models import User
@socketio.on('GET /users')
@socketio_admin_required
def get_users():
users = User.query.filter_by().all()
return {
'body': [user.to_json_serializable() for user in users],
'options': {
'status': 200,
'statusText': 'OK',
'headers': {'Content-Type: application/json'}
}
}
@socketio.on('SUBSCRIBE /users')
@socketio_admin_required
def subscribe_users():
join_room('/users')
return {'options': {'status': 200, 'statusText': 'OK'}}
@socketio.on('GET /users/<user_id>')
@socketio_login_required
def get_user(user_hashid):
user_id = hashids.decode(user_hashid)
user = User.query.get(user_id)
if user is None:
return {'options': {'status': 404, 'statusText': 'Not found'}}
if not (user == current_user or current_user.is_administrator):
return {'options': {'status': 403, 'statusText': 'Forbidden'}}
return {
'body': user.to_json_serializable(),
'options': {
'status': 200,
'statusText': 'OK',
'headers': {'Content-Type: application/json'}
}
}
@socketio.on('SUBSCRIBE /users/<user_id>')
@socketio_login_required
def subscribe_user(user_hashid):
user_id = hashids.decode(user_hashid)
user = User.query.get(user_id)
if user is None:
return {'options': {'status': 404, 'statusText': 'Not found'}}
if not (user == current_user or current_user.is_administrator):
return {'options': {'status': 403, 'statusText': 'Forbidden'}}
join_room(f'/users/{user.hashid}')
return {'options': {'status': 200, 'statusText': 'OK'}}
@socketio.on('GET /public_users')
@socketio_login_required
def get_public_users():
users = User.query.filter_by(is_public=True).all()
return {
'body': [
user.to_json_serializable(filter_by_privacy_settings=True)
for user in users
],
'options': {
'status': 200,
'statusText': 'OK',
'headers': {'Content-Type: application/json'}
}
}
@socketio.on('SUBSCRIBE /users')
@socketio_admin_required
def subscribe_users():
join_room('/public_users')
return {'options': {'status': 200, 'statusText': 'OK'}}
@socketio.on('GET /public_users/<user_id>')
@socketio_login_required
def get_user(user_hashid):
user_id = hashids.decode(user_hashid)
user = User.query.filter_by(id=user_id, is_public=True).first()
if user is None:
return {'options': {'status': 404, 'statusText': 'Not found'}}
if not (user == current_user or current_user.is_administrator):
return {'options': {'status': 403, 'statusText': 'Forbidden'}}
return {
'body': user.to_json_serializable(filter_by_privacy_settings=True),
'options': {
'status': 200,
'statusText': 'OK',
'headers': {'Content-Type: application/json'}
}
}
@socketio.on('SUBSCRIBE /public_users/<user_id>')
@socketio_login_required
def subscribe_user(user_hashid):
user_id = hashids.decode(user_hashid)
user = User.query.filter_by(id=user_id, is_public=True).first()
if user is None:
return {'options': {'status': 404, 'statusText': 'Not found'}}
if not (user == current_user or current_user.is_administrator):
return {'options': {'status': 403, 'statusText': 'Forbidden'}}
join_room(f'/public_users/{user.hashid}')
return {'options': {'status': 200, 'statusText': 'OK'}}
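
Note: this compare does not show a direct replacement for the function-based '/users' handlers removed above; per the commit "strictly use socket.io class based namespaces", they are presumably superseded by a Flask-SocketIO Namespace class. A minimal sketch of what such a class could look like, assuming the event names used by the client ('get_user', 'subscribe_user') further down in this compare; the class name, module location, and registration line are hypothetical, not taken from the diff:

from flask_login import current_user
from flask_socketio import Namespace, join_room
from app import hashids, socketio
from app.models import User


class UsersNamespace(Namespace):
    # Hypothetical class-based equivalent of the removed function handlers.
    # The login check previously handled by @socketio_login_required is kept
    # inline here for brevity.

    def on_get_user(self, user_hashid):
        if not current_user.is_authenticated:
            return {'status': 401, 'statusText': 'Unauthorized'}
        user = User.query.get(hashids.decode(user_hashid))
        if user is None:
            return {'status': 404, 'statusText': 'Not found'}
        if not (user == current_user or current_user.is_administrator):
            return {'status': 403, 'statusText': 'Forbidden'}
        return {
            'body': user.to_json_serializeable(backrefs=True, relationships=True),
            'status': 200,
            'statusText': 'OK'
        }

    def on_subscribe_user(self, user_hashid):
        if not current_user.is_authenticated:
            return {'status': 401, 'statusText': 'Unauthorized'}
        user = User.query.get(hashids.decode(user_hashid))
        if user is None:
            return {'status': 404, 'statusText': 'Not found'}
        if not (user == current_user or current_user.is_administrator):
            return {'status': 403, 'statusText': 'Forbidden'}
        join_room(f'/users/{user.hashid}')
        return {'status': 200, 'statusText': 'OK'}


# Assumed registration point; in the real app this presumably happens wherever
# the other class-based namespaces are registered.
socketio.on_namespace(UsersNamespace('/users'))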

View File

@@ -1,69 +1,25 @@
from flask import current_app
from pathlib import Path
def normalize_vrt_file(input_file, output_file):
def check_pos_attribute_order(vrt_lines):
# The following orders are possible:
# since 26.02.2019: 'word,lemma,simple_pos,pos,ner'
# since 26.03.2021: 'word,pos,lemma,simple_pos,ner'
# since 27.01.2022: 'word,pos,lemma,simple_pos'
# This Function tries to find out which order we have by looking at the
# number of attributes and the position of the simple_pos attribute
SIMPLE_POS_LABELS = [
'ADJ', 'ADP', 'ADV', 'AUX', 'CONJ',
'DET', 'INTJ', 'NOUN', 'NUM', 'PART',
'PRON', 'PROPN', 'PUNCT', 'SCONJ', 'SYM',
'VERB', 'X'
]
for line in vrt_lines:
if line.startswith('<'):
continue
pos_attrs = line.rstrip('\n').split('\t')
num_pos_attrs = len(pos_attrs)
if num_pos_attrs == 4:
if pos_attrs[3] in SIMPLE_POS_LABELS:
return ['word', 'pos', 'lemma', 'simple_pos']
continue
elif num_pos_attrs == 5:
if pos_attrs[2] in SIMPLE_POS_LABELS:
return ['word', 'lemma', 'simple_pos', 'pos', 'ner']
elif pos_attrs[3] in SIMPLE_POS_LABELS:
return ['word', 'pos', 'lemma', 'simple_pos', 'ner']
continue
return None
def check_has_ent_as_s_attr(vrt_lines):
for line in vrt_lines:
if line.startswith('<ent'):
return True
return False
def pos_attrs_to_string_1(pos_attrs):
return f'{pos_attrs[0]}\t{pos_attrs[3]}\t{pos_attrs[1]}\t{pos_attrs[2]}\n'
def pos_attrs_to_string_2(pos_attrs):
return f'{pos_attrs[0]}\t{pos_attrs[1]}\t{pos_attrs[2]}\t{pos_attrs[3]}\n'
def normalize_vrt_file(input_file: Path, output_file: Path):
current_app.logger.info(f'Converting {input_file}...')
with open(input_file) as f:
with input_file.open() as f:
input_vrt_lines = f.readlines()
pos_attr_order = check_pos_attribute_order(input_vrt_lines)
has_ent_as_s_attr = check_has_ent_as_s_attr(input_vrt_lines)
pos_attr_order = _check_pos_attribute_order(input_vrt_lines)
has_ent_as_s_attr = _check_has_ent_as_s_attr(input_vrt_lines)
current_app.logger.info(f'Detected pos_attr_order: [{",".join(pos_attr_order)}]')
current_app.logger.info(f'Detected has_ent_as_s_attr: {has_ent_as_s_attr}')
if pos_attr_order == ['word', 'lemma', 'simple_pos', 'pos', 'ner']:
pos_attrs_to_string_function = pos_attrs_to_string_1
pos_attrs_to_string_function = _pos_attrs_to_string_1
elif pos_attr_order == ['word', 'pos', 'lemma', 'simple_pos', 'ner']:
pos_attrs_to_string_function = pos_attrs_to_string_2
pos_attrs_to_string_function = _pos_attrs_to_string_2
elif pos_attr_order == ['word', 'pos', 'lemma', 'simple_pos']:
pos_attrs_to_string_function = pos_attrs_to_string_2
pos_attrs_to_string_function = _pos_attrs_to_string_2
else:
raise Exception('Can not handle format')
@@ -113,5 +69,49 @@ def normalize_vrt_file(input_file, output_file):
current_ent = pos_attrs[4]
output_vrt += pos_attrs_to_string_function(pos_attrs)
with open(output_file, 'w') as f:
with output_file.open(mode='w') as f:
f.write(output_vrt)
def _check_pos_attribute_order(vrt_lines: list[str]) -> list[str]:
# The following orders are possible:
# since 26.02.2019: 'word,lemma,simple_pos,pos,ner'
# since 26.03.2021: 'word,pos,lemma,simple_pos,ner'
# since 27.01.2022: 'word,pos,lemma,simple_pos'
# This Function tries to find out which order we have by looking at the
# number of attributes and the position of the simple_pos attribute
SIMPLE_POS_LABELS = [
'ADJ', 'ADP', 'ADV', 'AUX', 'CONJ', 'DET', 'INTJ', 'NOUN', 'NUM',
'PART', 'PRON', 'PROPN', 'PUNCT', 'SCONJ', 'SYM', 'VERB', 'X'
]
for line in vrt_lines:
if line.startswith('<'):
continue
pos_attrs = line.rstrip('\n').split('\t')
num_pos_attrs = len(pos_attrs)
if num_pos_attrs == 4:
if pos_attrs[3] in SIMPLE_POS_LABELS:
return ['word', 'pos', 'lemma', 'simple_pos']
continue
elif num_pos_attrs == 5:
if pos_attrs[2] in SIMPLE_POS_LABELS:
return ['word', 'lemma', 'simple_pos', 'pos', 'ner']
elif pos_attrs[3] in SIMPLE_POS_LABELS:
return ['word', 'pos', 'lemma', 'simple_pos', 'ner']
continue
# TODO: raise exception "can't determine attribute order"
def _check_has_ent_as_s_attr(vrt_lines: list[str]) -> bool:
for line in vrt_lines:
if line.startswith('<ent'):
return True
return False
def _pos_attrs_to_string_1(pos_attrs: list[str]) -> str:
return f'{pos_attrs[0]}\t{pos_attrs[3]}\t{pos_attrs[1]}\t{pos_attrs[2]}\n'
def _pos_attrs_to_string_2(pos_attrs: list[str]) -> str:
return f'{pos_attrs[0]}\t{pos_attrs[1]}\t{pos_attrs[2]}\t{pos_attrs[3]}\n'
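
For orientation, a minimal call-site sketch for the refactored normalizer (file names are placeholders; the call must run inside a Flask application context because the function logs via current_app):

from pathlib import Path
from app.converters.vrt import normalize_vrt_file

# The function now expects pathlib.Path objects; plain strings would fail on
# the input_file.open() / output_file.open() calls introduced above.
normalize_vrt_file(Path('corpus_file.vrt'), Path('corpus_file.norm.vrt'))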

View File

@@ -1,2 +1,2 @@
from .handle_corpora import job as handle_corpora
from .handle_jobs import job as handle_jobs
from .handle_corpora import handle_corpora
from .handle_jobs import handle_jobs
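
The renamed daemon entry points still have to be registered with the scheduler somewhere outside this compare. A sketch of what that wiring might look like, assuming scheduler is a Flask-APScheduler instance (suggested by the scheduler.app.app_context() calls below); the module paths, job ids, and intervals are assumptions:

from app import scheduler
# Assumed module paths; only the renamed symbols are visible in this compare.
from app.daemon.handle_corpora import handle_corpora
from app.daemon.handle_jobs import handle_jobs

# Each entry point opens its own app context, so plain interval jobs suffice.
# The 3-second interval is illustrative, not taken from this compare.
scheduler.add_job(id='handle_corpora', func=handle_corpora, trigger='interval', seconds=3)
scheduler.add_job(id='handle_jobs', func=handle_jobs, trigger='interval', seconds=3)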

View File

@@ -1,12 +1,12 @@
from app import db, docker_client, scheduler
from app.models import Corpus, CorpusStatus
from flask import current_app
import docker
import os
import shutil
from app import db, docker_client, scheduler
from app.models import Corpus, CorpusStatus
def job():
def handle_corpora():
with scheduler.app.app_context():
_handle_corpora()
@@ -21,14 +21,14 @@ def _handle_corpora():
for corpus in [x for x in corpora if x.status == CorpusStatus.RUNNING_ANALYSIS_SESSION and x.num_analysis_sessions == 0]:
corpus.status = CorpusStatus.CANCELING_ANALYSIS_SESSION
for corpus in [x for x in corpora if x.status == CorpusStatus.RUNNING_ANALYSIS_SESSION]:
_checkout_analysing_corpus_container(corpus)
_checkout_cqpserver_container(corpus)
for corpus in [x for x in corpora if x.status == CorpusStatus.STARTING_ANALYSIS_SESSION]:
_create_cqpserver_container(corpus)
for corpus in [x for x in corpora if x.status == CorpusStatus.CANCELING_ANALYSIS_SESSION]:
_remove_cqpserver_container(corpus)
db.session.commit()
def _create_build_corpus_service(corpus):
def _create_build_corpus_service(corpus: Corpus):
''' # Docker service settings # '''
''' ## Command ## '''
command = ['bash', '-c']
@@ -53,9 +53,7 @@ def _create_build_corpus_service(corpus):
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cwb:r1879'
''' ## Labels ## '''
labels = {
'origin': current_app.config['SERVER_NAME'],
'type': 'corpus.build',
'corpus_id': str(corpus.id)
'nopaque.server_name': current_app.config['SERVER_NAME']
}
''' ## Mounts ## '''
mounts = []
@@ -100,7 +98,7 @@ def _create_build_corpus_service(corpus):
return
corpus.status = CorpusStatus.QUEUED
def _checkout_build_corpus_service(corpus):
def _checkout_build_corpus_service(corpus: Corpus):
service_name = f'build-corpus_{corpus.id}'
try:
service = docker_client.services.get(service_name)
@@ -128,8 +126,7 @@ def _checkout_build_corpus_service(corpus):
except docker.errors.DockerException as e:
current_app.logger.error(f'Remove service "{service_name}" failed: {e}')
def _create_cqpserver_container(corpus):
''' # Docker container settings # '''
def _create_cqpserver_container(corpus: Corpus):
''' ## Command ## '''
command = []
command.append(
@@ -146,7 +143,7 @@ def _create_cqpserver_container(corpus):
''' ## Image ## '''
image = f'{current_app.config["NOPAQUE_DOCKER_IMAGE_PREFIX"]}cwb:r1879'
''' ## Name ## '''
name = f'cqpserver_{corpus.id}'
name = f'nopaque-cqpserver-{corpus.id}'
''' ## Network ## '''
network = f'{current_app.config["NOPAQUE_DOCKER_NETWORK_NAME"]}'
''' ## Volumes ## '''
@@ -203,8 +200,8 @@ def _create_cqpserver_container(corpus):
return
corpus.status = CorpusStatus.RUNNING_ANALYSIS_SESSION
def _checkout_analysing_corpus_container(corpus):
container_name = f'cqpserver_{corpus.id}'
def _checkout_cqpserver_container(corpus: Corpus):
container_name = f'nopaque-cqpserver-{corpus.id}'
try:
docker_client.containers.get(container_name)
except docker.errors.NotFound as e:
@@ -214,8 +211,8 @@ def _checkout_analysing_corpus_container(corpus):
except docker.errors.DockerException as e:
current_app.logger.error(f'Get container "{container_name}" failed: {e}')
def _remove_cqpserver_container(corpus):
container_name = f'cqpserver_{corpus.id}'
def _remove_cqpserver_container(corpus: Corpus):
container_name = f'nopaque-cqpserver-{corpus.id}'
try:
container = docker_client.containers.get(container_name)
except docker.errors.NotFound:

View File

@@ -1,3 +1,10 @@
from datetime import datetime
from flask import current_app
from werkzeug.utils import secure_filename
import docker
import json
import os
import shutil
from app import db, docker_client, hashids, scheduler
from app.models import (
Job,
@@ -6,16 +13,9 @@ from app.models import (
TesseractOCRPipelineModel,
SpaCyNLPPipelineModel
)
from datetime import datetime
from flask import current_app
from werkzeug.utils import secure_filename
import docker
import json
import os
import shutil
def job():
def handle_jobs():
with scheduler.app.app_context():
_handle_jobs()
@@ -29,7 +29,7 @@ def _handle_jobs():
_remove_job_service(job)
db.session.commit()
def _create_job_service(job):
def _create_job_service(job: Job):
''' # Docker service settings # '''
''' ## Service specific settings ## '''
if job.service == 'file-setup-pipeline':
@@ -86,9 +86,7 @@ def _create_job_service(job):
constraints = ['node.role==worker']
''' ## Labels ## '''
labels = {
'origin': current_app.config['SERVER_NAME'],
'type': 'job',
'job_id': str(job.id)
'origin': current_app.config['SERVER_NAME']
}
''' ## Mounts ## '''
mounts = []
@@ -169,7 +167,7 @@ def _create_job_service(job):
return
job.status = JobStatus.QUEUED
def _checkout_job_service(job):
def _checkout_job_service(job: Job):
service_name = f'job_{job.id}'
try:
service = docker_client.services.get(service_name)
@@ -218,7 +216,7 @@ def _checkout_job_service(job):
except docker.errors.DockerException as e:
current_app.logger.error(f'Remove service "{service_name}" failed: {e}')
def _remove_job_service(job):
def _remove_job_service(job: Job):
service_name = f'job_{job.id}'
try:
service = docker_client.services.get(service_name)

View File

@@ -8,7 +8,7 @@ import shutil
import xml.etree.ElementTree as ET
from app import db
from app.converters.vrt import normalize_vrt_file
from app.extensions.sqlalchemy_extras import IntEnumColumn
from app.extensions.nopaque_sqlalchemy_extras import IntEnumColumn
from .corpus_follower_association import CorpusFollowerAssociation

View File

@@ -43,7 +43,7 @@ def resource_after_delete(mapper, connection, resource):
}
]
room = f'/users/{resource.user_hashid}'
socketio.emit('PATCH', jsonpatch, room=room)
socketio.emit('patch_user', jsonpatch, namespace='/users', room=room)
def cfa_after_delete(mapper, connection, cfa):
@@ -55,7 +55,7 @@ def cfa_after_delete(mapper, connection, cfa):
}
]
room = f'/users/{cfa.corpus.user.hashid}'
socketio.emit('PATCH', jsonpatch, room=room)
socketio.emit('patch_user', jsonpatch, namespace='/users', room=room)
def resource_after_insert(mapper, connection, resource):
@@ -70,7 +70,7 @@ def resource_after_insert(mapper, connection, resource):
}
]
room = f'/users/{resource.user_hashid}'
socketio.emit('PATCH', jsonpatch, room=room)
socketio.emit('patch_user', jsonpatch, namespace='/users', room=room)
def cfa_after_insert(mapper, connection, cfa):
@@ -84,7 +84,7 @@ def cfa_after_insert(mapper, connection, cfa):
}
]
room = f'/users/{cfa.corpus.user.hashid}'
socketio.emit('PATCH', jsonpatch, room=room)
socketio.emit('patch_user', jsonpatch, namespace='/users', room=room)
def resource_after_update(mapper, connection, resource):
@@ -110,7 +110,7 @@ def resource_after_update(mapper, connection, resource):
)
if jsonpatch:
room = f'/users/{resource.user_hashid}'
socketio.emit('PATCH', jsonpatch, room=room)
socketio.emit('patch_user', jsonpatch, namespace='/users', room=room)
def job_after_update(mapper, connection, job):

View File

@@ -6,7 +6,7 @@ from time import sleep
from pathlib import Path
import shutil
from app import db
from app.extensions.sqlalchemy_extras import ContainerColumn, IntEnumColumn
from app.extensions.nopaque_sqlalchemy_extras import ContainerColumn, IntEnumColumn
class JobStatus(IntEnum):

View File

@@ -5,7 +5,7 @@ from pathlib import Path
import requests
import yaml
from app import db
from app.extensions.sqlalchemy_extras import ContainerColumn
from app.extensions.nopaque_sqlalchemy_extras import ContainerColumn
from .file_mixin import FileMixin
from .user import User

View File

@@ -5,7 +5,7 @@ from pathlib import Path
import requests
import yaml
from app import db
from app.extensions.sqlalchemy_extras import ContainerColumn
from app.extensions.nopaque_sqlalchemy_extras import ContainerColumn
from .file_mixin import FileMixin
from .user import User

View File

@@ -11,7 +11,7 @@ import re
import secrets
import shutil
from app import db, hashids
from app.extensions.sqlalchemy_extras import IntEnumColumn
from app.extensions.nopaque_sqlalchemy_extras import IntEnumColumn
from .corpus import Corpus
from .corpus_follower_association import CorpusFollowerAssociation
from .corpus_follower_role import CorpusFollowerRole

View File

@@ -9,7 +9,7 @@ from threading import Lock
from app import db, docker_client, hashids, socketio
from app.decorators import socketio_login_required
from app.models import Corpus, CorpusStatus
from . import extensions
from . import cqi_extensions
from .utils import CQiOverSocketIOSessionManager
@@ -152,8 +152,8 @@ class CQiOverSocketIONamespace(Namespace):
if fn_name in CQI_API_FUNCTION_NAMES:
fn = getattr(cqi_client.api, fn_name)
elif fn_name in extensions.CQI_EXTENSION_FUNCTION_NAMES:
fn = getattr(extensions, fn_name)
elif fn_name in cqi_extensions.CQI_EXTENSION_FUNCTION_NAMES:
fn = getattr(cqi_extensions, fn_name)
else:
return {'code': 400, 'msg': 'Bad Request'}

View File

@@ -1,29 +1,32 @@
nopaque.App = class App {
#promises;
constructor() {
this.data = {
promises: {getUser: {}, subscribeUser: {}},
users: {},
users: {}
};
this.sockets = {};
this.sockets['/users'] = io(
'/users',
{
transports: ['websocket'],
upgrade: false
}
);
// this.socket = io({transports: ['websocket'], upgrade: false});
this.socket = this.sockets['/users'];
this.socket.on('PATCH', (patch) => {this.onPatch(patch);});
this.#promises = {
getUser: {},
subscribeUser: {}
};
this.sockets = {
users: io('/users', {transports: ['websocket'], upgrade: false})
};
this.sockets.users.on('patch_user', (patch) => {this.onPatch(patch);});
}
getUser(userId) {
if (userId in this.data.promises.getUser) {
return this.data.promises.getUser[userId];
if (userId in this.#promises.getUser) {
return this.#promises.getUser[userId];
}
this.data.promises.getUser[userId] = new Promise((resolve, reject) => {
this.socket.emit('get_user', userId, (response) => {
let socket = this.sockets.users;
this.#promises.getUser[userId] = new Promise((resolve, reject) => {
socket.emit('get_user', userId, (response) => {
if (response.status === 200) {
this.data.users[userId] = response.body;
resolve(this.data.users[userId]);
@@ -33,25 +36,27 @@ nopaque.App = class App {
});
});
return this.data.promises.getUser[userId];
return this.#promises.getUser[userId];
}
subscribeUser(userId) {
if (userId in this.data.promises.subscribeUser) {
return this.data.promises.subscribeUser[userId];
if (userId in this.#promises.subscribeUser) {
return this.#promises.subscribeUser[userId];
}
this.data.promises.subscribeUser[userId] = new Promise((resolve, reject) => {
this.socket.emit('subscribe_user', userId, (response) => {
if (response.status !== 200) {
reject(response);
return;
}
let socket = this.sockets.users;
this.#promises.subscribeUser[userId] = new Promise((resolve, reject) => {
socket.emit('subscribe_user', userId, (response) => {
if (response.status === 200) {
resolve(response);
} else {
reject(response);
}
});
});
return this.data.promises.subscribeUser[userId];
return this.#promises.subscribeUser[userId];
}
flash(message, category) {

View File

@@ -8,7 +8,7 @@ nopaque.resource_displays.ResourceDisplay = class ResourceDisplay {
if (this.userId) {
app.subscribeUser(this.userId)
.then((response) => {
app.socket.on('PATCH', (patch) => {
app.sockets.users.on('patch_user', (patch) => {
if (this.isInitialized) {this.onPatch(patch);}
});
});

View File

@@ -15,7 +15,7 @@ nopaque.resource_lists.CorpusFileList = class CorpusFileList extends nopaque.res
this.hasPermissionManageFiles = listContainerElement.dataset?.hasPermissionManageFiles == 'true' || false;
if (this.userId === undefined || this.corpusId === undefined) {return;}
app.subscribeUser(this.userId).then((response) => {
app.socket.on('PATCH', (patch) => {
app.sockets.users.on('patch_user', (patch) => {
if (this.isInitialized) {this.onPatch(patch);}
});
});

View File

@@ -13,7 +13,7 @@ nopaque.resource_lists.CorpusFollowerList = class CorpusFollowerList extends nop
this.corpusId = listContainerElement.dataset.corpusId;
if (this.userId === undefined || this.corpusId === undefined) {return;}
app.subscribeUser(this.userId).then((response) => {
app.socket.on('PATCH', (patch) => {
app.sockets.users.on('patch_user', (patch) => {
if (this.isInitialized) {this.onPatch(patch);}
});
});

View File

@@ -12,7 +12,7 @@ nopaque.resource_lists.CorpusList = class CorpusList extends nopaque.resource_li
this.userId = listContainerElement.dataset.userId;
if (this.userId === undefined) {return;}
app.subscribeUser(this.userId).then((response) => {
app.socket.on('PATCH', (patch) => {
app.sockets.users.on('patch_user', (patch) => {
if (this.isInitialized) {this.onPatch(patch);}
});
});

View File

@@ -13,7 +13,7 @@ nopaque.resource_lists.JobList = class JobList extends nopaque.resource_lists.Re
this.userId = listContainerElement.dataset.userId;
if (this.userId === undefined) {return;}
app.subscribeUser(this.userId).then((response) => {
app.socket.on('PATCH', (patch) => {
app.sockets.users.on('patch_user', (patch) => {
if (this.isInitialized) {this.onPatch(patch);}
});
});

View File

@@ -9,7 +9,7 @@ nopaque.resource_lists.JobResultList = class JobResultList extends nopaque.resou
this.jobId = listContainerElement.dataset.jobId;
if (this.userId === undefined || this.jobId === undefined) {return;}
app.subscribeUser(this.userId).then((response) => {
app.socket.on('PATCH', (patch) => {
app.sockets.users.on('patch_user', (patch) => {
if (this.isInitialized) {this.onPatch(patch);}
});
});

View File

@@ -9,7 +9,7 @@ nopaque.resource_lists.SpaCyNLPPipelineModelList = class SpaCyNLPPipelineModelLi
this.userId = listContainerElement.dataset.userId;
if (this.userId === undefined) {return;}
app.subscribeUser(this.userId).then((response) => {
app.socket.on('PATCH', (patch) => {
app.sockets.users.on('patch_user', (patch) => {
if (this.isInitialized) {this.onPatch(patch);}
});
});

View File

@@ -9,7 +9,7 @@ nopaque.resource_lists.TesseractOCRPipelineModelList = class TesseractOCRPipelin
this.userId = listContainerElement.dataset.userId;
if (this.userId === undefined) {return;}
app.subscribeUser(this.userId).then((response) => {
app.socket.on('PATCH', (patch) => {
app.sockets.users.on('patch_user', (patch) => {
if (this.isInitialized) {this.onPatch(patch);}
});
});