New analysis stuff

This commit is contained in:
Stephan Porada
2020-01-27 16:11:34 +01:00
parent b4427cd3ec
commit 4b0e9392a7
3 changed files with 97 additions and 71 deletions


@ -224,12 +224,17 @@
{"dismissible": true});
M.Collapsible.init(elem, {accordion: false});
loadingModal.open();
nopaque.socket.emit("request_corpus_analysis", {{ corpus_id }});
nopaque.socket.emit("corpus_analysis_init", {{ corpus_id }});
});
// close loading modal if container for analysis has started
nopaque.socket.on("request_corpus_analysis", function(msg) {
if (msg === "[201]: Created") {loadingModal.close();}
nopaque.socket.on("corpus_analysis_init", function(response) {
if (response.code === 201) {
loadingModal.close();
} else {
console.log("[ERROR] corpus_analysis_init");
console.log(response);
}
});
// experimental view stuff, maybe reuse, REMOVE later
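
The init handshake now checks a numeric code field on a response object instead of matching the string "[201]: Created". A minimal sketch of the same round trip wrapped in a Promise, assuming nopaque.socket is a standard socket.io client socket; the helper name and the timeout are illustrative and not part of nopaque:

// Illustrative only: await the corpus_analysis_init acknowledgement instead of
// pairing a bare emit with a separate listener. Helper name and timeout are
// assumptions, not nopaque API.
function initCorpusAnalysis(corpusId) {
  return new Promise(function(resolve, reject) {
    let timer = setTimeout(function() {
      reject(new Error("corpus_analysis_init timed out"));
    }, 10000);
    nopaque.socket.once("corpus_analysis_init", function(response) {
      clearTimeout(timer);
      if (response.code === 201) {
        resolve(response);
      } else {
        reject(response);
      }
    });
    nopaque.socket.emit("corpus_analysis_init", corpusId);
  });
}
// usage: initCorpusAnalysis({{ corpus_id }}).then(function() {loadingModal.close();});
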
@ -275,9 +280,9 @@
queryFormElement.addEventListener("submit", function(event) {
event.preventDefault();
let formData = new FormData(queryFormElement);
let queryData = {"context": formData.get("context"),
"hits_per_page": formData.get("hits_per_page"),
"query": formData.get("query")};
queryData = {"context": formData.get("context"), // global declaration
"hits_per_page": formData.get("hits_per_page"),
"query": formData.get("query")};
hitsPerPage = formData.get("hits_per_page");
nopaque.socket.emit("corpus_analysis", queryData);
// full results object declaration, kind of global maybe store it later?
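
Dropping the let keyword turns queryData into a value shared with the corpus_analysis_query handler below, and the trailing comment notes the same intent for the full results object. A sketch of that shared state declared explicitly at the top of the script instead of as implicit globals; the initial shapes and the hitsPerPage default are assumptions inferred from how the handlers use them:

// Illustrative: state shared between the submit handler and the socket handlers,
// declared once up front instead of leaking out of the callbacks as implicit globals.
let queryData = {};                                             // last submitted query parameters
let result = {matches: [], cpos_lookup: {}, text_lookup: {}};   // accumulated result chunks (assumed shape)
let hitsPerPage = 30;                                           // assumed default

queryFormElement.addEventListener("submit", function(event) {
  event.preventDefault();
  let formData = new FormData(queryFormElement);
  queryData = {"context": formData.get("context"),
               "hits_per_page": formData.get("hits_per_page"),
               "query": formData.get("query")};
  hitsPerPage = formData.get("hits_per_page");
  nopaque.socket.emit("corpus_analysis", queryData);
});
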
@ -318,7 +323,24 @@
// socket.on triggered when result chunks are received
nopaque.socket.on("corpus_analysis", function(response) {
nopaque.socket.on("corpus_analysis_query", function(response) {
// response code checking
if (response["code"] === 0) {
console.log("[SUCCESS] corpus_analysis_query");
console.log("Code:" + response["code"]);
// further code execution of this code block starting in line 342
} else if (response["code"] === 1) {
queryResultsTableElement.classList.add("hide");
queryLoadingElement.classList.add("hide");
nopaque.toast("Invalid query entered!", "red");
console.log("[ERROR] corpus_analysis_query");
console.log("Code:" + response["code"]);
return; // no further code execution of this code block
} else {
console.log("[ERROR] corpus_analysis_query");
console.log("Code:" + response["code"]);
return; // no further code execution of this code block
}
// logs the current received chunk
chunk = response["result"];
console.log("### corpus_analysis chunk ###");
@ -330,62 +352,56 @@
Object.assign(result["cpos_lookup"], chunk["cpos_lookup"]);
Object.assign(result["text_lookup"], chunk["text_lookup"]);
result["match_count"] = chunk["match_count"];
result["query"] = chunk["query"];
result["query"] = queryData["query"];
console.log(result);
// Some hiding and showing of loading animations
queryLoadingElement.classList.add("hide");
queryResultsTableElement.classList.remove("hide");
queryResultsElement.innerHTML = "";
// some checks for erroneous or empty query results
// No results for this query
if (chunk === null) {
// check if query has any results
if (chunk["matches"].length === 0) {
queryResultsTableElement.classList.add("hide");
nopaque.toast("No results for this query!");
return;
// Query was invalid
} else if (chunk === "CQI_CQP_ERROR_GENERAL") {
queryResultsTableElement.classList.add("hide");
nopaque.toast("Invalid query entered!", "red");
return;
// List building/appending the chunks when query had results
} else {
// write query metadata information into HTML elements,
// e.g. the number of matches and in how many corpus files they occur
// TODO: count_corpus_files must be taken from the full results.
match_count = chunk["match_count"];
let count_corpus_files = Object.keys(result["text_lookup"]).length;
queryResultsMetadataElement.innerHTML = chunk["match_count"] + " matches in " + count_corpus_files + " corpus files.";
queryResultsMetadataElement.appendChild(exportQueryResults);
exportQueryResults.classList.remove("hide");
}
var toAdd = [];
for (let [index, match] of chunk["matches"].entries()) {
lc_tokens = "";
for (cpos of match["lc"]) {
word = chunk["cpos_lookup"][cpos]["word"];
lc_tokens += " " + word;
}
// console.log(lc_tokens);
hit_tokens = "";
for (cpos of match["hit"]) {
word = chunk["cpos_lookup"][cpos]["word"];
hit_tokens += " " + word;
}
// console.log(hit_tokens);
rc_tokens = "";
for (cpos of match["rc"]) {
word = chunk["cpos_lookup"][cpos]["word"];
rc_tokens += " " + word;
}
// console.log(rc_tokens);
item = { titles: "test", lc: lc_tokens, hit: hit_tokens, rc: rc_tokens };
toAdd.push(item);
}
resultList.add(toAdd, function(toAdd) {console.log('All '
+ toAdd.length
+ ' results were added!')});
}
// List building/appending the chunks when query had results
// write query metadata information into HTML elements,
// e.g. the number of matches and in how many corpus files they occur
// TODO: count_corpus_files must be taken from the full results.
match_count = chunk["match_count"];
let count_corpus_files = Object.keys(result["text_lookup"]).length;
queryResultsMetadataElement.innerHTML = chunk["match_count"] + " matches in " + count_corpus_files + " corpus files.";
queryResultsMetadataElement.appendChild(exportQueryResults);
exportQueryResults.classList.remove("hide");
var toAdd = [];
for (let [index, match] of chunk["matches"].entries()) {
lc_tokens = "";
for (cpos of match["lc"]) {
word = chunk["cpos_lookup"][cpos]["word"];
lc_tokens += " " + word;
}
// console.log(lc_tokens);
hit_tokens = "";
for (cpos of match["hit"]) {
word = chunk["cpos_lookup"][cpos]["word"];
hit_tokens += " " + word;
}
// console.log(hit_tokens);
rc_tokens = "";
for (cpos of match["rc"]) {
word = chunk["cpos_lookup"][cpos]["word"];
rc_tokens += " " + word;
}
// console.log(rc_tokens);
item = { titles: "test", lc: lc_tokens, hit: hit_tokens, rc: rc_tokens };
toAdd.push(item);
}
resultList.add(toAdd, function(toAdd) {console.log('All '
+ toAdd.length
+ ' results were added!')});
});
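
Inside the corpus_analysis_query handler, each result row is built by walking the lc, hit and rc corpus positions of a match and concatenating the word forms from the chunk's cpos_lookup. A sketch of the same row construction with map/join instead of manual string concatenation; the helper name is illustrative and, unlike the loops above, the joined strings carry no leading space:

// Illustrative: resolve a list of corpus positions to a space-joined string of
// word forms via the chunk's cpos_lookup. Helper name is not nopaque API.
function tokensToText(cposList, cposLookup) {
  return cposList.map(function(cpos) {return cposLookup[cpos]["word"];}).join(" ");
}

let rows = chunk["matches"].map(function(match) {
  return {
    titles: "test",  // placeholder title, as in the commit
    lc: tokensToText(match["lc"], chunk["cpos_lookup"]),
    hit: tokensToText(match["hit"], chunk["cpos_lookup"]),
    rc: tokensToText(match["rc"], chunk["cpos_lookup"])
  };
});
resultList.add(rows, function(added) {
  console.log("All " + added.length + " results were added!");
});
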
// Function to download data to a file