Work on new list building

This commit is contained in:
Stephan Porada 2020-01-29 16:12:57 +01:00
parent c1adcb93ee
commit da4cc75943
4 changed files with 98 additions and 39 deletions

View File

@ -233,20 +233,26 @@ class CQiWrapper(CQiClient):
tmp_info[struct_attr_key].append(id)
else:
structs_to_check.append({key: struct_attr_key})
logger.warning('Structs to check: {}'.format(structs_to_check))
struct_attr_values = list(tmp_info.values())
# logger.warning('Struct attr value list: {}'.format(struct_attr_values))
struct_attr_keys = list(tmp_info.keys())
# logger.warning('Struct attr key list: {}'.format(struct_attr_keys))
# Build textlookup dictionary
text_lookup_ids = list(set(struct_attr_values[0])) # First is always one text
text_lookup = {}
text_lookup_ids = list(set(struct_attr_values[0])) # every CPOS is associated with one text id. A set is build to only gather text_lookup informations for every unique text id
text_lookup = {} # final dict containing all info of one text identified by its id
for d in structs_to_check:
s_key, s_value = zip(*d.items())
s_value = s_value[0].split('_', 1)[1]
logger.warning('dict entries: {}: {}'.format(s_key, s_value))
s_value = s_value[0].split('_', 1)[-1]
logger.warning('S_VALUE: {}'.format(s_value))
struct_values = self.cl_struc2str(s_key[0], text_lookup_ids)
logger.warning('Extracted Value with key {}: {}'.format(s_key[0],struct_values))
zipped = dict(zip(text_lookup_ids, struct_values))
for zip_key, zip_value in zipped.items():
logger.warning('key: {}'.format(zip_key))
logger.warning('value: {}'.format(zip_value))
logger.warning('Text id as key is: {}'.format(zip_key))
logger.warning('Value of this text is: {}'.format(zip_value))
check = text_lookup.get(zip_key)
logger.warning('check: {}'.format(check))
if check is None:

View File

@ -1,3 +0,0 @@
// Empty placeholder subclass: JobList inherits all behavior from list.js's
// List unchanged (this file is deleted by the commit per the @ -1,3 +0,0 hunk).
class JobList extends List {
}

View File

@ -243,6 +243,74 @@ class JobList extends List {
return rowElement;
}
}
class ResultList extends List {
  // Builds the <tr> element representing one query match for list.js.
  // Only the left-context ("lc") column is implemented so far; the hit,
  // right-context and text-title cells plus the inspect button are TODO.
  // NOTE(review): relies on the globals `chunk` (cpos lookup data from the
  // query response) and `List` (list.js) being in scope — confirm in template.
  createResultRowElement(item) {
    let values, cpos, matchRowElement, lcCellElement, lcTokenElement, token;
    // gather values from the list.js item
    values = item.values();
    // row element that will hold the full match
    matchRowElement = document.createElement("tr");
    // left context: one <td> with a <span class="token"> per corpus position
    for (cpos of values["lc"]) {
      lcCellElement = document.createElement("td");
      lcTokenElement = document.createElement("span");
      lcTokenElement.classList.add("token");
      lcTokenElement.dataset.cpos = cpos;
      token = chunk["cpos_lookup"][cpos];
      // Bug fix: the original did `lcTokenElement = token["word"]`, replacing
      // the configured <span> (class + data-cpos) with a plain string, so the
      // following outerHTML log printed `undefined`. Set the text instead.
      lcTokenElement.innerText = token["word"];
      // Attach the token span to its cell and the cell to the row so the
      // built markup actually ends up in the returned element.
      lcCellElement.appendChild(lcTokenElement);
      matchRowElement.appendChild(lcCellElement);
    }
    // TODO: build the hit/rc/title cells and the inspect button (previously
    // sketched here as commented-out code; see commit history).
    // The caller assigns this element to item.elm, so it must be returned.
    return matchRowElement;
  }
}
JobList.DEFAULT_OPTIONS = {item: "<br>",
page: 4,
pagination: {innerWindow: 8, outerWindow: 1},

View File

@ -310,15 +310,14 @@
innerWindow: 8,
outerWindow: 1
}],
valueNames: ["titles", "lc", "hit", "rc"],
valueNames: ["titles", "lc", "hit", "rc", {data: ["index"]}],
item: `<tr>
<td class="titles"></td>
<td class="lc"></td>
<td class="hit"></td>
<td class="rc"></td>
</tr>`};
resultList = new List('result-list', options);
resultList = new ResultList('result-list', options);
});
@ -326,14 +325,14 @@
nopaque.socket.on("corpus_analysis_query", function(response) {
// ERROR code checking
if (response["code"] === 0) {
console.log("[ERROR] corpus_analysis_init");
console.log("[SUCCESS] corpus_analysis_init");
console.log("Code:" + response["code"]);
// further code execution of this code block starting in line 342
} else if (response["code"] === 1) {
queryResultsTableElement.classList.add("hide");
queryLoadingElement.classList.add("hide");
nopaque.toast("Invalid query entered!", "red");
console.log("[SUCCESS] corpus_analysis_init");
console.log("[ERROR] corpus_analysis_init");
console.log("Code:" + response["code"]);
return; // no further code execution of this code block
} else {
@ -369,40 +368,29 @@
// List building/appending the chunks when query had results
// write metadata query information into HTML elements
// like nr. of all matches in how many files etc.
// TODO: count_corpus_files müssen aus full results genommen werden.
// TODO: count_corpus_files müssen aus full results genommen werden. Ist am Ende richtig aber dazwischen zählt es hoch
match_count = chunk["match_count"];
let count_corpus_files = Object.keys(result["text_lookup"]).length;
queryResultsMetadataElement.innerHTML = chunk["match_count"] + " matches in " + count_corpus_files + " corpus files.";
queryResultsMetadataElement.appendChild(exportQueryResults);
exportQueryResults.classList.remove("hide");
var toAdd = [];
var resultItems = []; // list for holding every row item
// get infos for full match row
for (let [index, match] of chunk["matches"].entries()) {
lc_tokens = "";
for (cpos of match["lc"]) {
word = chunk["cpos_lookup"][cpos]["word"];
lc_tokens += " " + word;
}
// console.log(lc_tokens);
hit_tokens = "";
for (cpos of match["hit"]) {
word = chunk["cpos_lookup"][cpos]["word"];
hit_tokens += " " + word;
}
// console.log(hit_tokens);
rc_tokens = "";
for (cpos of match["rc"]) {
word = chunk["cpos_lookup"][cpos]["word"];
rc_tokens += " " + word;
}
// console.log(rc_tokens);
item = { titles: "test", lc: lc_tokens, hit: hit_tokens, rc: rc_tokens };
toAdd.push(item);
resultItems.push({...match, ...{"index": index}});
}
resultList.add(resultItems, items => {
for (let item of items) {
item.elm = resultList.createResultRowElement(item);}
});
resultList.update();
});
// inspect match functions
// Placeholder handler for the per-match "inspect" action; currently it
// only logs that it was triggered.
function inspect() {
  console.log("Inspect!");
}
resultList.add(toAdd, function(toAdd) {console.log('All '
+ toAdd.length
+ ' results were added!')});
});
// Function to download data to a file
function download(downloadElem, data, filename, type) {