mirror of https://gitlab.ub.uni-bielefeld.de/sfb1288inf/nopaque.git
synced 2024-12-24 10:34:17 +00:00
more exception handling
parent 966cdc824a
commit bc87bf67bf
@@ -70,14 +70,10 @@ def corpus_analysis_session_handler(app, corpus_id, user_id, session_id):
         connect_status = client.connect()
         payload = {'code': connect_status, 'msg': cqi.api.specification.lookup[connect_status]} # noqa
     except cqi.errors.CQiException as e:
-        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
-        response = {'code': 500, 'desc': None,
-                    'msg': 'Internal Server Error', 'payload': payload}
-        socketio.emit('corpus_analysis_init', response, room=session_id)
+        handle_cqi_exception('corpus_analysis_init', e, session_id)
         return
     except gaierror:
-        response = {'code': 500, 'desc': None,
-                    'msg': 'Internal Server Error'}
+        response = {'code': 500, 'desc': None, 'msg': 'Internal Server Error'} # noqa
         socketio.emit('corpus_analysis_init', response, room=session_id)
         return
     corpus_analysis_clients[session_id] = client
@@ -127,7 +123,7 @@ def corpus_analysis_get_meta_data(corpus_id):
     if client is None:
         response = {'code': 424, 'desc': 'No client found for this session',
                     'msg': 'Failed Dependency'}
-        socketio.emit('corpus_analysis_query', response, room=request.sid)
+        socketio.emit('corpus_analysis_meta_data', response, room=request.sid)
         return
     # check if client is busy or not
     if client.status == 'running':
@@ -197,10 +193,8 @@ def corpus_analysis_query(query):
         query_status = corpus.query(query)
         results = corpus.subcorpora.get('Results')
     except cqi.errors.CQiException as e:
-        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
-        response = {'code': 500, 'desc': None, 'msg': 'Internal Server Error',
-                    'payload': payload}
-        socketio.emit('corpus_analysis_query', response, room=request.sid)
+        client.status = 'ready'
+        handle_cqi_exception('corpus_analysis_query', e, request.sid)
         return
     payload = {'status': query_status,
                'msg': cqi.api.specification.lookup[query_status],
@@ -214,17 +208,19 @@ def corpus_analysis_query(query):
     while chunk_start <= results.attrs['size']:
         if client.status == 'abort':
             break
-        chunk = results.export(context=context, cutoff=chunk_size,
-                               offset=chunk_start)
+        try:
+            chunk = results.export(context=context, cutoff=chunk_size, offset=chunk_start) # noqa
+        except cqi.errors.CQiException as e:
+            handle_cqi_exception('corpus_analysis_query', e, request.sid)
+            break
         if (results.attrs['size'] == 0):
             progress = 100
         else:
             progress = ((chunk_start + chunk_size) / results.attrs['size']) * 100 # noqa
         progress = min(100, int(math.ceil(progress)))
-        response = {'code': 200, 'desc': None, 'msg': 'OK',
-                    'payload': {'chunk': chunk, 'progress': progress}}
-        socketio.emit('corpus_analysis_query_results', response,
-                      room=request.sid)
+        payload = {'chunk': chunk, 'progress': progress}
+        response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload}
+        socketio.emit('corpus_analysis_query_results', response, room=request.sid) # noqa
         chunk_start += chunk_size
     client.status = 'ready'
 
@@ -238,11 +234,8 @@ def corpus_analysis_get_match_with_full_context(payload):
     last_cpos = payload['last_cpos']
     client = corpus_analysis_clients.get(request.sid)
     if client is None:
-        response = {'code': 424,
-                    'desc': 'No client found for this session',
-                    'msg': 'Failed Dependency',
-                    'type': type,
-                    'data_indexes': data_indexes}
+        response = {'code': 424, 'desc': 'No client found for this session',
+                    'msg': 'Failed Dependency'}
         socketio.emit('corpus_analysis_get_match_with_full_context', response,
                       room=request.sid)
         return
@@ -254,42 +247,30 @@ def corpus_analysis_get_match_with_full_context(payload):
     try:
         corpus = client.corpora.get('CORPUS')
         s = corpus.structural_attributes.get('s')
-        payload = {}
-        payload['matches'] = []
-        payload['cpos_lookup'] = {}
-        payload['text_lookup'] = {}
-        payload['progress'] = 0
-        i = 0
-        # Send data one match at a time.
-        for index, f_cpos, l_cpos in zip(data_indexes, first_cpos, last_cpos):
-            i += 1
-            tmp_match = s.export(f_cpos, l_cpos, context=10)
-            payload['matches'].append(tmp_match['matches'][0])
-            payload['cpos_lookup'].update(tmp_match['cpos_lookup'])
-            payload['text_lookup'].update(tmp_match['text_lookup'])
-            payload['progress'] = i/len(data_indexes)*100
-            response = {'code': 200,
-                        'desc': None,
-                        'msg': 'OK',
-                        'payload': payload,
-                        'type': type,
-                        'data_indexes': data_indexes}
-            socketio.emit('corpus_analysis_get_match_with_full_context',
-                          response, room=request.sid)
-            payload['matches'] = []
-            payload['cpos_lookup'] = {}
-            payload['text_lookup'] = {}
     except cqi.errors.CQiException as e:
-        payload = {'code': e.code, 'desc': e.description, 'msg': e.name}
-        response = {'code': 500,
-                    'desc': None,
-                    'msg': 'Internal Server Error',
-                    'payload': payload,
-                    'type': type,
-                    'data_indexes': data_indexes}
+        handle_cqi_exception('corpus_analysis_get_match_with_full_context', e, request.sid) # noqa
+        return
+    i = 0
+    # Send data one match at a time.
+    for index, f_cpos, l_cpos in zip(data_indexes, first_cpos, last_cpos):
+        i += 1
+        matches = []
+        cpos_lookup = text_lookup = {}
+        try:
+            tmp = s.export(f_cpos, l_cpos, context=10)
+        except cqi.errors.CQiException as e:
+            handle_cqi_exception('corpus_analysis_get_match_with_full_context', e, request.sid) # noqa
+            break
+        matches.append(tmp['matches'][0])
+        cpos_lookup.update(tmp['cpos_lookup'])
+        text_lookup.update(tmp['text_lookup'])
+        progress = i / len(data_indexes) * 100
+        payload = {'matches': matches, 'progress': progress,
+                   'cpos_lookup': cpos_lookup, 'text_lookup': text_lookup}
+        response = {'code': 200, 'desc': None, 'msg': 'OK', 'payload': payload,
+                    'type': type, 'data_indexes': data_indexes}
         socketio.emit('corpus_analysis_get_match_with_full_context',
-                      response,
-                      room=request.sid)
+                      response, room=request.sid)
     client.status = 'ready'
 
 
@@ -301,7 +282,7 @@ def export_corpus(corpus_id):
         response = {'code': 404, 'msg': 'Not found'}
         socketio.emit('export_corpus', response, room=request.sid)
         return
-    if corpus.status not in ['prepared', 'start analysis', 'stop analysis']:
+    if corpus.status != 'prepared':
         response = {'code': 412, 'msg': 'Precondition Failed'}
         socketio.emit('export_corpus', response, room=request.sid)
         return
@@ -315,3 +296,13 @@ def export_corpus(corpus_id):
     shutil.make_archive(zip_path, 'zip', corpus.path)
     shutil.move(zip_path + '.zip', corpus.archive_file)
     socketio.emit('export_corpus_' + str(corpus.id), room=request.sid)
+
+
+def handle_cqi_exception(event, exception, room):
+    response = {'code': 500,
+                'desc': None,
+                'msg': 'Internal Server Error',
+                'payload': {'code': exception.code,
+                            'desc': exception.description,
+                            'msg': exception.name}}
+    socketio.emit(event, response, room=room)
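For orientation only, a minimal usage sketch of the new handle_cqi_exception helper follows; it is not part of the commit. It assumes the module-level socketio instance, the corpus_analysis_clients dict, and the cqi import used by the handlers above; the event name 'corpus_analysis_example' and its @socketio.on registration are illustrative assumptions.

    import cqi
    from flask import request

    @socketio.on('corpus_analysis_example')  # hypothetical event, for illustration only
    def corpus_analysis_example(query):
        # Look up the CQi client bound to this Socket.IO session, as the real handlers do.
        client = corpus_analysis_clients.get(request.sid)
        try:
            corpus = client.corpora.get('CORPUS')
            corpus.query(query)
        except cqi.errors.CQiException as e:
            # One call replaces the hand-built response dict plus socketio.emit:
            # it emits {'code': 500, 'desc': None, 'msg': 'Internal Server Error',
            #           'payload': {'code': e.code, 'desc': e.description, 'msg': e.name}}
            # to the requesting session's room.
            handle_cqi_exception('corpus_analysis_example', e, request.sid)
            return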