from app import db, logger, socketio
from app.events import connected_sessions
from app.models import Corpus
from flask import current_app, request
from flask_login import current_user, login_required
from .CQiWrapper.CQiWrapper import CQiWrapper
import gzip
import json
import sys
import zlib


'''
' A dictionary containing lists of, with corpus ids associated, Socket.IO
' session ids (sid).
' {<corpus_id>: [<sid>, ...], ...}
'''
analysis_sessions = {}
'''
' A dictionary containing Socket.IO session id - CQi client pairs.
' {<sid>: CQiClient, ...}
'''
analysis_clients = {}


@socketio.on('init_corpus_analysis')
@login_required
def init_corpus_analysis(corpus_id):
    '''
    ' Open an analysis session for the requesting Socket.IO client.
    '
    ' Validates access to the corpus, waits until the corpus' analysis
    ' container is up ('analysing' status), connects a CQi client for this
    ' session and spawns a background task that cleans up when the client
    ' disconnects. Emits 'init_corpus_analysis' with an error string or
    ' 'Ready' to the requesting session.
    '''
    corpus = Corpus.query.get(corpus_id)
    if corpus is None:
        socketio.emit('init_corpus_analysis', '[ERROR 404]: Not Found',
                      room=request.sid)
    elif not (corpus.creator == current_user
              or current_user.is_administrator()):
        socketio.emit('init_corpus_analysis', '[ERROR 403]: Forbidden',
                      room=request.sid)
    else:
        # Register this session for the corpus (creating the list on first
        # use).
        analysis_sessions.setdefault(corpus_id, []).append(request.sid)
        # Block (cooperatively) until the analysis container is running;
        # the status is flipped by the corpus service elsewhere.
        while corpus.status != 'analysing':
            db.session.refresh(corpus)
            socketio.sleep(3)
        analysis_clients[request.sid] = CQiWrapper(
            host='analyse_corpus_{}'.format(corpus.id))
        analysis_clients[request.sid].connect()
        socketio.emit('init_corpus_analysis', 'Ready', room=request.sid)
        socketio.start_background_task(observe_corpus_analysis_connection,
                                       current_app._get_current_object(),
                                       corpus_id, request.sid)


@socketio.on('query')
@login_required
def recv_query(message):
    '''
    ' Execute a CQP query for the requesting session.
    '
    ' ``message`` is expected to carry 'query', 'hits_per_page' and
    ' 'context' keys. Emits the zlib-compressed JSON results (or a 424
    ' error string when the session has no CQi client) back to the
    ' requesting session on the 'query' event.
    '''
    analysis_client = analysis_clients.get(request.sid)
    if analysis_client is None:
        socketio.emit('query', '[ERROR 424]: Failed Dependency',
                      room=request.sid)
        return
    # Prepare and execute a query.
    logger.warning('Payload: {}'.format(message))
    corpus_name = 'CORPUS'
    query = message['query']
    analysis_client.select_corpus(corpus_name)
    analysis_client.query_subcorpus(query)
    results = analysis_client.show_query_results(
        result_len=int(message['hits_per_page']),
        context_len=int(message['context']))
    # logger.warning('RESULTS: {}'.format(results))
    # Debug instrumentation: compare payload sizes (in MB) of the raw dict,
    # its string form, gzip compression and the zlib variant actually sent.
    # NOTE(review): sys.getsizeof is shallow, so size_internal_dict
    # understates the dict's true footprint.
    size_internal_dict = sys.getsizeof(results) / 1000000
    size_dict_to_str = sys.getsizeof(str(results)) / 1000000
    compressed_str = gzip.compress(str(results).encode())
    size_dict_to_str_compressed = sys.getsizeof(compressed_str) / 1000000
    zlib_compressed = zlib.compress(json.dumps(results).encode('utf-8'))
    size_zlib_compressed = sys.getsizeof(zlib_compressed) / 1000000
    logger.warning('Internal size of dict for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], size_internal_dict))
    logger.warning('Size of dict as raw string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], size_dict_to_str))
    logger.warning('Size of gzip compressed dict to string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], size_dict_to_str_compressed))
    logger.warning('Size of zlib compressed and utf-8 encoded string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], size_zlib_compressed))
    socketio.emit('query', zlib_compressed, room=request.sid)


def observe_corpus_analysis_connection(app, corpus_id, session_id):
    '''
    ' Background task watching one analysis session.
    '
    ' Polls until the Socket.IO session disconnects, then disconnects and
    ' drops its CQi client. When the departing session was the last one on
    ' the corpus, the corpus status is set to 'stop analysis' so the
    ' analysis container gets torn down.
    '''
    with app.app_context():
        while session_id in connected_sessions:
            socketio.sleep(3)
        analysis_client = analysis_clients.pop(session_id, None)
        if analysis_client is not None:
            analysis_client.disconnect()
        analysis_sessions[corpus_id].remove(session_id)
        if not analysis_sessions[corpus_id]:
            analysis_sessions.pop(corpus_id, None)
            # Last session left: request shutdown of the analysis
            # container. (Stopping it while other sessions remain attached
            # would break their CQi clients.)
            corpus = Corpus.query.get(corpus_id)
            if corpus is not None:
                # The corpus may have been deleted while we were polling;
                # in that case there is nothing left to stop.
                corpus.status = 'stop analysis'
                db.session.commit()