Rename some stuff

Stephan Porada 2020-01-27 13:19:33 +01:00
parent cd02046590
commit 2b1e8f34de
3 changed files with 36 additions and 32 deletions

@@ -44,6 +44,7 @@ class CQiWrapper(CQiClient):
         '''
         p_attrs = self.corpus_positional_attributes(self.corpus_name)
         struct_attrs = self.corpus_structural_attributes(self.corpus_name)
+        logger.warning('struct_attrs: {}'.format(struct_attrs))
         self.attr_strings = {}
         self.attr_strings['positional_attrs'] = {}
         self.attr_strings['struct_attrs'] = {}
@@ -100,9 +101,8 @@ class CQiWrapper(CQiClient):
                                  + ':'
                                  + result_subcorpus_name)
         self.SUBCORPUS_NAMES.append(self.result_subcorpus)
-        self.total_nr_matches = self.cqp_subcorpus_size(self.result_subcorpus)
-        logger.warning('Nr of all matches is: {}'.format(self.total_nr_matches))
-        logger.warning('Nr of all matches is: {}'.format(self.total_nr_matches))
+        self.match_count = self.cqp_subcorpus_size(self.result_subcorpus)
+        logger.warning('Nr of all matches is: {}'.format(self.match_count))

     def show_subcorpora(self):
         '''
@@ -136,7 +136,7 @@ class CQiWrapper(CQiClient):
         self.corpus_max_len = self.cl_attribute_size(
             self.attr_strings['positional_attrs']['word']
         )
-        self.nr_matches = min(result_len, self.total_nr_matches)
+        self.nr_matches = min(result_len, self.match_count)
         if self.nr_matches == 0:
             logger.warning('Query resulted in 0 matches.')
             return None
@@ -147,7 +147,7 @@ class CQiWrapper(CQiClient):
         # [(1355, 1357), (1477, 1479)] Example for two boundry pairs
         offset_start = 0 if result_offset == 0 else result_offset
         logger.warning('Offset start is: {}'.format(offset_start))
-        offset_end = min((self.nr_matches + result_offset - 1), self.total_nr_matches - 1)
+        offset_end = min((self.nr_matches + result_offset - 1), self.match_count - 1)
         logger.warning('Offset end is: {}'.format(offset_end))
         match_boundaries = zip(self.cqp_dump_subcorpus(self.result_subcorpus,
                                                        CONST_FIELD_MATCH,
@@ -196,11 +196,12 @@ class CQiWrapper(CQiClient):
         t_final = t3 - t2
         logger.warning('Got infos for {} CPOS in {} seconds:'.format(len_all_cpos,
                                                                      t_final))
-        self.results = {'matches': all_matches,
-                        'cpos_lookup': all_cpos_infos,
-                        'text_lookup': text_lookup,
-                        'total_nr_matches': self.total_nr_matches,
-                        'query': self.query}
+        self.results = {'code': 0,
+                        'result': {'matches': all_matches,
+                                   'match_count': self.match_count,
+                                   'cpos_lookup': all_cpos_infos,
+                                   'text_lookup': text_lookup,}
+                        }
         return self.results

     def get_cpos_infos(self, all_cpos):
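
Note on the payload shape: the hunk above changes show_query_results so that the match data is nested under a 'result' key next to a numeric 'code'. A minimal sketch of the new envelope with placeholder values (the keys are taken from this diff; the comments are assumptions about what each field holds):

    # Sketch only: placeholder values, keys as introduced in the hunk above.
    results = {
        'code': 0,              # presumably a status code for the client
        'result': {
            'matches': [],      # match data for the current chunk
            'match_count': 0,   # renamed from total_nr_matches
            'cpos_lookup': {},  # presumably corpus position -> token annotations
            'text_lookup': {},  # presumably text id -> text metadata
        },
    }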

@@ -61,7 +61,7 @@ def corpus_analysis(message):
                                         result_offset=result_offset)
     result_offset += result_len  # initial offfset is plus result len because client.show_query_results has been already executed once
     socketio.emit('corpus_analysis', results, room=request.sid)
-    while result_offset < client.total_nr_matches:
+    while result_offset < client.match_count:
         logger.warning('====== While loop start for {} ======'.format(query))
         logger.warning('result_offset: {}'.format(result_offset))
         results = client.show_query_results(result_len=result_len,
@@ -71,7 +71,7 @@ def corpus_analysis(message):
         # results['cpos_lookup'].update(results_append['cpos_lookup'])
         # results['text_lookup'].update(results_append['text_lookup'])
         result_offset += result_len
-        result_offset = min(result_offset, client.total_nr_matches)
+        result_offset = min(result_offset, client.match_count)
         logger.warning('result_offset end of while loop: {}'.format(result_offset))
         socketio.emit('corpus_analysis', results, room=request.sid)
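
The handler above streams results in chunks: it emits the first page, then keeps requesting pages until result_offset reaches client.match_count (the attribute renamed in this commit). A condensed sketch of that pagination pattern, not the project's exact handler (client and emit stand in for the CQi wrapper and the SocketIO emit call):

    # Simplified sketch of the chunked delivery loop in the corpus_analysis handler.
    def stream_query_results(client, emit, result_len):
        # Send the first chunk, then page through the remaining matches.
        results = client.show_query_results(result_len=result_len, result_offset=0)
        emit('corpus_analysis', results)
        result_offset = result_len
        while result_offset < client.match_count:
            results = client.show_query_results(result_len=result_len,
                                                result_offset=result_offset)
            result_offset = min(result_offset + result_len, client.match_count)
            emit('corpus_analysis', results)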

@@ -266,7 +266,7 @@
 ;
 // create some global variables
 var hitsPerPage;
-var full_results;
+var result;
 var resultList;

 // Get query form element and save its data on submit and send this data via
@@ -283,12 +283,12 @@
   // full results object declaration, kind of global maybe store it later?
   // will always be reset if a query is sent, so that only the chunks of the
   // current query will be saved in it
-  full_results = {};
-  full_results["matches"] = [];
-  full_results["cpos_lookup"] = {};
-  full_results["text_lookup"] = {};
-  full_results["total_nr_matches"] = 0;
-  full_results["query"] = "";
+  result = {};
+  result["matches"] = [];
+  result["cpos_lookup"] = {};
+  result["text_lookup"] = {};
+  result["match_count"] = 0;
+  result["query"] = "";
   // some hiding/showing for loading animation
   queryLoadingElement.classList.remove("hide");
   queryResultsTableElement.classList.add("hide");
@@ -318,19 +318,20 @@
 // socket.on triggered when result chunks are recieved
-nopaque.socket.on("corpus_analysis", function(chunk) {
+nopaque.socket.on("corpus_analysis", function(response) {
   // logs the current recieved chunk
+  chunk = response["result"];
   console.log("### corpus_analysis chunk ###");
   console.log(chunk);
   // logs and extends/push/update the current recieved chunk to the
-  // full_results Object
-  console.log("### corpus analysis updated full_results json ###");
-  full_results["matches"].push(...chunk["matches"]);
-  Object.assign(full_results["cpos_lookup"], chunk["cpos_lookup"]);
-  Object.assign(full_results["text_lookup"], chunk["text_lookup"]);
-  full_results["total_nr_matches"] = chunk["total_nr_matches"];
-  full_results["query"] = chunk["query"];
-  console.log(full_results);
+  // result Object
+  console.log("### corpus analysis updated result json ###");
+  result["matches"].push(...chunk["matches"]);
+  Object.assign(result["cpos_lookup"], chunk["cpos_lookup"]);
+  Object.assign(result["text_lookup"], chunk["text_lookup"]);
+  result["match_count"] = chunk["match_count"];
+  result["query"] = chunk["query"];
+  console.log(result);
   // Some hiding and showing of loading animations
   queryLoadingElement.classList.add("hide");
   queryResultsTableElement.classList.remove("hide");
@@ -352,9 +353,9 @@
   // write metadata query information into HTML elements
   // like nr. of all matches in how many files etc.
   // TODO: count_corpus_files must be taken from the full results.
-  total_nr_matches = chunk["total_nr_matches"];
-  let count_corpus_files = Object.keys(chunk["text_lookup"]).length;
-  queryResultsMetadataElement.innerHTML = chunk["total_nr_matches"] + " matches in " + count_corpus_files + " corpus files.";
+  match_count = chunk["match_count"];
+  let count_corpus_files = Object.keys(result["text_lookup"]).length;
+  queryResultsMetadataElement.innerHTML = chunk["match_count"] + " matches in " + count_corpus_files + " corpus files.";
   queryResultsMetadataElement.appendChild(exportQueryResults);
   exportQueryResults.classList.remove("hide");
@@ -381,7 +382,9 @@
       item = { titles: "test", lc: lc_tokens, hit: hit_tokens, rc: rc_tokens };
       toAdd.push(item);
     }
-    resultList.add(toAdd, function(toAdd) {console.log('All ' + toAdd.length + 'results were added!')});
+    resultList.add(toAdd, function(toAdd) {console.log('All '
+                                                       + toAdd.length
+                                                       + ' results were added!')});
   }
 });