Mirror of https://gitlab.ub.uni-bielefeld.de/sfb1288inf/nopaque.git (synced 2024-12-24 10:34:17 +00:00)

Rename some stuff

Commit: 2b1e8f34de
Parent: cd02046590
@@ -44,6 +44,7 @@ class CQiWrapper(CQiClient):
         '''
         p_attrs = self.corpus_positional_attributes(self.corpus_name)
         struct_attrs = self.corpus_structural_attributes(self.corpus_name)
+        logger.warning('struct_attrs: {}'.format(struct_attrs))
         self.attr_strings = {}
         self.attr_strings['positional_attrs'] = {}
         self.attr_strings['struct_attrs'] = {}
@@ -100,9 +101,8 @@ class CQiWrapper(CQiClient):
                                       + ':'
                                       + result_subcorpus_name)
         self.SUBCORPUS_NAMES.append(self.result_subcorpus)
-        self.total_nr_matches = self.cqp_subcorpus_size(self.result_subcorpus)
-        logger.warning('Nr of all matches is: {}'.format(self.total_nr_matches))
-        logger.warning('Nr of all matches is: {}'.format(self.total_nr_matches))
+        self.match_count = self.cqp_subcorpus_size(self.result_subcorpus)
+        logger.warning('Nr of all matches is: {}'.format(self.match_count))

     def show_subcorpora(self):
         '''
@@ -136,7 +136,7 @@ class CQiWrapper(CQiClient):
         self.corpus_max_len = self.cl_attribute_size(
             self.attr_strings['positional_attrs']['word']
         )
-        self.nr_matches = min(result_len, self.total_nr_matches)
+        self.nr_matches = min(result_len, self.match_count)
         if self.nr_matches == 0:
             logger.warning('Query resulted in 0 matches.')
             return None
@@ -147,7 +147,7 @@ class CQiWrapper(CQiClient):
         # [(1355, 1357), (1477, 1479)] Example for two boundry pairs
         offset_start = 0 if result_offset == 0 else result_offset
         logger.warning('Offset start is: {}'.format(offset_start))
-        offset_end = min((self.nr_matches + result_offset - 1), self.total_nr_matches - 1)
+        offset_end = min((self.nr_matches + result_offset - 1), self.match_count - 1)
         logger.warning('Offset end is: {}'.format(offset_end))
         match_boundaries = zip(self.cqp_dump_subcorpus(self.result_subcorpus,
                                                        CONST_FIELD_MATCH,
@@ -196,11 +196,12 @@ class CQiWrapper(CQiClient):
         t_final = t3 - t2
         logger.warning('Got infos for {} CPOS in {} seconds:'.format(len_all_cpos,
                                                                      t_final))
-        self.results = {'matches': all_matches,
-                        'cpos_lookup': all_cpos_infos,
-                        'text_lookup': text_lookup,
-                        'total_nr_matches': self.total_nr_matches,
-                        'query': self.query}
+        self.results = {'code': 0,
+                        'result': {'matches': all_matches,
+                                   'match_count': self.match_count,
+                                   'cpos_lookup': all_cpos_infos,
+                                   'text_lookup': text_lookup,}
+                        }
         return self.results

     def get_cpos_infos(self, all_cpos):
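For orientation, the envelope that show_query_results returns after this change has roughly the shape sketched below. The key names and nesting come from the hunk above; the values are placeholders and the comments are only descriptive.

# Sketch of the reshaped results envelope (placeholder values only).
results = {
    'code': 0,
    'result': {
        'matches': [],       # all_matches in the hunk above
        'match_count': 0,    # renamed from total_nr_matches
        'cpos_lookup': {},   # all_cpos_infos in the hunk above
        'text_lookup': {},   # text_lookup in the hunk above
    }
}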
@@ -61,7 +61,7 @@ def corpus_analysis(message):
                                        result_offset=result_offset)
    result_offset += result_len  # initial offfset is plus result len because client.show_query_results has been already executed once
    socketio.emit('corpus_analysis', results, room=request.sid)
-   while result_offset < client.total_nr_matches:
+   while result_offset < client.match_count:
        logger.warning('====== While loop start for {} ======'.format(query))
        logger.warning('result_offset: {}'.format(result_offset))
        results = client.show_query_results(result_len=result_len,
@@ -71,7 +71,7 @@ def corpus_analysis(message):
        # results['cpos_lookup'].update(results_append['cpos_lookup'])
        # results['text_lookup'].update(results_append['text_lookup'])
        result_offset += result_len
-       result_offset = min(result_offset, client.total_nr_matches)
+       result_offset = min(result_offset, client.match_count)
        logger.warning('result_offset end of while loop: {}'.format(result_offset))
        socketio.emit('corpus_analysis', results, room=request.sid)

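Taken together with the previous hunk, the paging logic in corpus_analysis reduces to roughly the sketch below. The loop condition, the offset arithmetic, and the event name come from the hunks; the function wrapper and the 'client' and 'emit' parameters are assumptions used only to keep the sketch self-contained.

# Sketch of the chunked result emission after the rename.
# 'client' stands in for the CQiWrapper instance and 'emit' for socketio.emit
# bound to the requesting client; both are assumptions.
def emit_query_results_in_chunks(client, emit, result_len, first_chunk):
    # The first chunk has already been fetched before this point.
    emit('corpus_analysis', first_chunk)
    result_offset = result_len
    while result_offset < client.match_count:
        results = client.show_query_results(result_len=result_len,
                                             result_offset=result_offset)
        result_offset += result_len
        result_offset = min(result_offset, client.match_count)
        emit('corpus_analysis', results)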
@@ -266,7 +266,7 @@
 ;
 // create some global variables
 var hitsPerPage;
-var full_results;
+var result;
 var resultList;

 // Get query form element and save its data on submit and send this data via
@@ -283,12 +283,12 @@
 // full results object declaration, kind of global maybe store it later?
 // will always be reset if a query is sent, so that only the chunks of the
 // current query will be saved in it
-full_results = {};
-full_results["matches"] = [];
-full_results["cpos_lookup"] = {};
-full_results["text_lookup"] = {};
-full_results["total_nr_matches"] = 0;
-full_results["query"] = "";
+result = {};
+result["matches"] = [];
+result["cpos_lookup"] = {};
+result["text_lookup"] = {};
+result["match_count"] = 0;
+result["query"] = "";
 // some hiding/showing for loading animation
 queryLoadingElement.classList.remove("hide");
 queryResultsTableElement.classList.add("hide");
@@ -318,19 +318,20 @@


 // socket.on triggered when result chunks are recieved
-nopaque.socket.on("corpus_analysis", function(chunk) {
+nopaque.socket.on("corpus_analysis", function(response) {
 // logs the current recieved chunk
+chunk = response["result"];
 console.log("### corpus_analysis chunk ###");
 console.log(chunk);
 // logs and extends/push/update the current recieved chunk to the
-// full_results Object
-console.log("### corpus analysis updated full_results json ###");
-full_results["matches"].push(...chunk["matches"]);
-Object.assign(full_results["cpos_lookup"], chunk["cpos_lookup"]);
-Object.assign(full_results["text_lookup"], chunk["text_lookup"]);
-full_results["total_nr_matches"] = chunk["total_nr_matches"];
-full_results["query"] = chunk["query"];
-console.log(full_results);
+// result Object
+console.log("### corpus analysis updated result json ###");
+result["matches"].push(...chunk["matches"]);
+Object.assign(result["cpos_lookup"], chunk["cpos_lookup"]);
+Object.assign(result["text_lookup"], chunk["text_lookup"]);
+result["match_count"] = chunk["match_count"];
+result["query"] = chunk["query"];
+console.log(result);
 // Some hiding and showing of loading animations
 queryLoadingElement.classList.add("hide");
 queryResultsTableElement.classList.remove("hide");
@@ -352,9 +353,9 @@
 // write metadata query information into HTML elements
 // like nr. of all matches in how many files etc.
 // TODO: count_corpus_files müssen aus full results genommen werden.
-total_nr_matches = chunk["total_nr_matches"];
-let count_corpus_files = Object.keys(chunk["text_lookup"]).length;
-queryResultsMetadataElement.innerHTML = chunk["total_nr_matches"] + " matches in " + count_corpus_files + " corpus files.";
+match_count = chunk["match_count"];
+let count_corpus_files = Object.keys(result["text_lookup"]).length;
+queryResultsMetadataElement.innerHTML = chunk["match_count"] + " matches in " + count_corpus_files + " corpus files.";
 queryResultsMetadataElement.appendChild(exportQueryResults);
 exportQueryResults.classList.remove("hide");

@@ -381,7 +382,9 @@
 item = { titles: "test", lc: lc_tokens, hit: hit_tokens, rc: rc_tokens };
 toAdd.push(item);
 }
-resultList.add(toAdd, function(toAdd) {console.log('All ' + toAdd.length + 'results were added!')});
+resultList.add(toAdd, function(toAdd) {console.log('All '
+                                                   + toAdd.length
+                                                   + ' results were added!')});
 }
 });
