Remove gzip compression

Patrick Jentsch 2019-11-25 10:29:05 +01:00
parent 3d8d2b3e48
commit eeed9268bb
4 changed files with 15 additions and 6859 deletions

View File

@@ -55,18 +55,7 @@ def corpus_analysis(message):
     results = client.show_query_results(
         result_len=int(message['hits_per_page']),
         context_len=int(message['context']))
-    # logger.warning('RESULTS: {}'.format(results))
-    size_internal_dict = sys.getsizeof(results) / 1000000
-    size_dict_to_str = sys.getsizeof(str(results)) / 1000000
-    compressed_str = gzip.compress(str(results).encode())
-    size_dict_to_str_compressed = sys.getsizeof(compressed_str) / 1000000
-    zlib_compressed = zlib.compress(json.dumps(results).encode('utf-8'))
-    size_zlib_compressed = sys.getsizeof(zlib_compressed) / 1000000
-    logger.warning('Internal size of dict for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], size_internal_dict))
-    logger.warning('Size of dict as raw string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], size_dict_to_str))
-    logger.warning('Size of gzip compressed dict to string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], size_dict_to_str_compressed))
-    logger.warning('Size of zlib compressed and utf-8 encoded string for {} hits per page and context len {}: {} MB'.format(message['hits_per_page'], message['context'], size_zlib_compressed))
-    socketio.emit('corpus_analysis', zlib_compressed, room=request.sid)
+    socketio.emit('corpus_analysis', results, room=request.sid)


 def corpus_analysis_session_handler(app, corpus_id, session_id):
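
For reference, the deleted lines above amounted to a size experiment on the serialized query results. A minimal standalone sketch of that measurement, assuming a made-up results dict in place of what client.show_query_results() actually returns:

import gzip
import json
import sys
import zlib

# Hypothetical stand-in for the CQi query results dict.
results = {str(i): {'match_cpos_list': [i, i + 1, i + 2]} for i in range(1000)}

size_internal_dict = sys.getsizeof(results) / 1000000
size_dict_to_str = sys.getsizeof(str(results)) / 1000000
compressed_str = gzip.compress(str(results).encode())
zlib_compressed = zlib.compress(json.dumps(results).encode('utf-8'))

print('dict object: {:.3f} MB'.format(size_internal_dict))
print('str(dict): {:.3f} MB'.format(size_dict_to_str))
print('gzip(str(dict)): {:.3f} MB'.format(sys.getsizeof(compressed_str) / 1000000))
print('zlib(json.dumps(dict)): {:.3f} MB'.format(sys.getsizeof(zlib_compressed) / 1000000))

Note that sys.getsizeof() on the dict only measures the container, not the nested values, which is presumably why the stringified and compressed sizes were logged alongside it.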

File diff suppressed because it is too large

View File

@@ -19,7 +19,6 @@
     <script src="{{ url_for('static', filename='js/CorpusList.js') }}"></script>
     <script src="{{ url_for('static', filename='js/JobList.js') }}"></script>
     <script src="{{ url_for('static', filename='js/darkreader.js') }}"></script>
-    <script src="{{ url_for('static', filename='js/pako.js') }}"></script>
     <script>
       {% if current_user.is_dark == True %}
         DarkReader.enable({

View File

@@ -115,37 +115,12 @@
       M.toast({html: 'Query has been sent!'});
     });
-    function decodeResults(resultsByteArray) {
-      console.log(resultsByteArray);
-      var decompressedData = pako.inflate(resultsByteArray); // decompresses the recieved ArrayBuffer holding the compressed Byte data
-      console.log(decompressedData);
-      utf8decoder = new TextDecoder(); // default is utf-8
-      var decodedStrData = utf8decoder.decode(decompressedData); // decodes the decompressed Uint8Array as an utf-8 string
-      console.log(decodedStrData);
-      return decodedStrData
-    }
-    function getResultInfos(matchObject) {
-      for (var key in matchObject) {
-        var token = matchObject[key];
-        for (var key in token) {
-          infos += token[key]['word'] + ' ';
-        }
-        var infos = infos;
-      }
-      return infos
-    }
     socket.on('corpus_analysis', function(results) {
-      console.log(results);
-      var decodedJSONStr = decodeResults(results);
-      var results = JSON.parse(decodedJSONStr);
-      if (results === null){
+      if (results === null) {
         M.toast({html: 'Query has no results!'});
-      }
-      else {
-        console.log(results);
+      } else {
         html_txt = '<table class="highlight"> <thead><tr><th>Left context</th><th>Match</th><th>Right Context</th></tr></thead>';
-        for (var key in results) {
-          var hit = results[key];
+        for (let [key, hit] of Object.entries(results)) {
           var left_context = hit['context_before_cpos_list']
           var match = hit['match_cpos_list']
           var right_context = hit['context_after_cpos_list']
@@ -161,5 +136,16 @@
         queryResultsElement.innerHTML = html_txt;
       }
     });
+    function getResultInfos(matchObject) {
+      infos = '';
+      for (var key in matchObject) {
+        var token = matchObject[key];
+        for (var key in token) {
+          infos += token[key]['word'] + ' ';
+        }
+        var infos = infos;
+      }
+      return infos
+    }
</script>
{% endblock %}
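
Taken together, the server- and client-side deletions retire the compressed transport in favour of Socket.IO's plain JSON payloads. A rough Python-only sketch of the round trip being removed, assuming a hypothetical payload (pako.inflate and TextDecoder in the browser correspond to zlib.decompress and UTF-8 decoding here):

import json
import zlib

# Hypothetical stand-in for one page of query results.
results = {'0': {'context_before_cpos_list': [0],
                 'match_cpos_list': [1, 2, 3],
                 'context_after_cpos_list': [4]}}

# What the server used to emit: zlib-compressed, UTF-8 encoded JSON.
payload = zlib.compress(json.dumps(results).encode('utf-8'))

# What the browser had to undo with pako.inflate + TextDecoder + JSON.parse,
# expressed with their Python equivalents.
restored = json.loads(zlib.decompress(payload).decode('utf-8'))

assert restored == results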