# Mirror of https://gitlab.ub.uni-bielefeld.de/sfb1288inf/nopaque.git
# (synced 2024-11-15 01:05:42 +00:00)
import gzip
import json
import sys
import zlib

from flask import current_app, request
from flask_login import current_user, login_required

from app import db, logger, socketio
from app.events import connected_sessions
from app.models import Corpus

from .CQiWrapper.CQiWrapper import CQiWrapper
|
|
|
|
# Socket.IO session ids (sid) grouped by the corpus they are analysing:
# {<corpus_id>: [<sid>, ...], ...}
corpus_analysis_sessions = {}

# The dedicated CQi client of each Socket.IO session:
# {<sid>: CQiClient, ...}
corpus_analysis_clients = {}
|
|
|
|
|
|
@socketio.on('request_corpus_analysis')
@login_required
def request_corpus_analysis(corpus_id):
    """Start a corpus analysis session for the requesting socket.

    Looks up the corpus, checks the requester's access rights and, on
    success, spawns a background task that manages the analysis session
    for this Socket.IO connection.
    """
    corpus = Corpus.query.get(corpus_id)
    if corpus is None:
        socketio.emit('init_corpus_analysis', '[404]: Not Found',
                      room=request.sid)
        return
    # Only the corpus creator or an administrator may analyse it.
    authorized = (corpus.creator == current_user
                  or current_user.is_administrator())
    if not authorized:
        socketio.emit('init_corpus_analysis', '[403]: Forbidden',
                      room=request.sid)
        return
    socketio.start_background_task(corpus_analysis_session_handler,
                                   current_app._get_current_object(),
                                   corpus_id, request.sid)
|
|
|
|
|
|
@socketio.on('corpus_analysis')
@login_required
def corpus_analysis(message):
    """Execute a CQP query for the requesting socket and emit the results.

    ``message`` is expected to carry ``query``, ``hits_per_page`` and
    ``context``. The result dict is zlib-compressed (JSON-serialized,
    UTF-8 encoded) before being emitted back; several size measurements
    are logged for diagnostics along the way.
    """
    cqi_client = corpus_analysis_clients.get(request.sid)
    if cqi_client is None:
        # No CQi client has been set up for this session yet.
        socketio.emit('query', '[424]: Failed Dependency',
                      room=request.sid)
        return
    # Prepare and execute a query.
    logger.warning('Payload: {}'.format(message))
    cqp_query = message['query']
    cqi_client.select_corpus('CORPUS')
    cqi_client.query_subcorpus(cqp_query)
    results = cqi_client.show_query_results(
        result_len=int(message['hits_per_page']),
        context_len=int(message['context']))
    # Diagnostic size measurements (MB) comparing the raw result dict with
    # its stringified, gzip-compressed and zlib-compressed representations.
    mb_internal_dict = sys.getsizeof(results) / 1000000
    mb_dict_as_str = sys.getsizeof(str(results)) / 1000000
    gzipped_str = gzip.compress(str(results).encode())
    mb_gzipped_str = sys.getsizeof(gzipped_str) / 1000000
    payload = zlib.compress(json.dumps(results).encode('utf-8'))
    mb_payload = sys.getsizeof(payload) / 1000000
    logger.warning('Internal size of dict for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], mb_internal_dict))
    logger.warning('Size of dict as raw string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], mb_dict_as_str))
    logger.warning('Size of gzip compressed dict to string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], mb_gzipped_str))
    logger.warning('Size of zlib compressed and utf-8 encoded string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], mb_payload))
    socketio.emit('corpus_analysis', payload, room=request.sid)
|
|
|
|
|
|
def corpus_analysis_session_handler(app, corpus_id, session_id):
    """Manage the lifecycle of one corpus analysis session.

    Runs as a background task: waits until the corpus reaches the
    'analysing' state, connects a CQi client for the session, polls until
    the Socket.IO session disconnects and then tears everything down.

    :param app: the Flask application object (for the app context)
    :param corpus_id: id of the corpus being analysed
    :param session_id: Socket.IO session id (sid) of the requesting client
    """
    with app.app_context():
        # --- Setup analysis session ---
        corpus = Corpus.query.get(corpus_id)
        # Poll until the analysis container reports that it is ready.
        while corpus.status != 'analysing':
            db.session.refresh(corpus)
            socketio.sleep(3)
        cqi_client = CQiWrapper(host='analyse_corpus_{}'.format(corpus_id))
        cqi_client.connect()
        corpus_analysis_clients[session_id] = cqi_client
        corpus_analysis_sessions.setdefault(corpus_id, []).append(session_id)
        socketio.emit('request_corpus_analysis', '[201]: Created',
                      room=session_id)
        # --- Observe analysis session ---
        while session_id in connected_sessions:
            socketio.sleep(3)
        # --- Teardown analysis session ---
        cqi_client.disconnect()
        corpus_analysis_clients.pop(session_id, None)
        corpus_analysis_sessions[corpus_id].remove(session_id)
        if not corpus_analysis_sessions[corpus_id]:
            # Last session on this corpus: request the container to stop.
            corpus_analysis_sessions.pop(corpus_id, None)
            corpus.status = 'stop analysis'
        db.session.commit()
|