/*
* This file is part of PKM (Persistent Knowledge Monitor).
* Copyright (c) 2020 Capgemini Group, Commissariat à l'énergie atomique et aux énergies alternatives,
* OW2, Sysgo AG, Technikon, Tree Technology, Universitat Politècnica de València.
*
* PKM is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License version 3 as published by
* the Free Software Foundation.
*
* PKM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PKM. If not, see <https://www.gnu.org/licenses/>.
*/
/** Postprocess logs.
 *
 * Reassembles full log documents from (possibly out-of-order) log chunk
 * documents: chunks sharing the same ID are reordered by their 'chunkId',
 * their 'content' fields are concatenated and parsed as JSON, and the
 * remaining fields of chunk 0 (id, timestamps, ...) are merged into the
 * resulting document. Optionally sorts the resulting documents.
 *
 * Note: the input chunk documents are modified in place ('content' and
 * '_id' are removed; 'id' is replicated from '_id' when missing).
 *
 * @memberof PKM
 * @instance
 * @param {Array.<Object>} log_chunk_documents - Log chunk documents
 * @param {Object} [options] - options
 * @param {Object} [options.sort] - sort specification: maps a document key to 1 (ascending) or -1 (descending); keys are compared in declaration order until a comparison is non-zero
 *
 * @return {Promise<Array.<Object>>} a promise resolved with the reassembled (and optionally sorted) log documents, or rejected when chunks are missing or the merged content is not valid JSON
 */
function postprocess_logs(log_chunk_documents, options = {})
{
    return new Promise(function(resolve, reject)
    {
        let log_documents = [];
        // the log chunk documents may be out-of-order, so we first need to reorder the chunks for each ID
        let chunks_per_id = new Map();
        log_chunk_documents.forEach((log_chunk_document) =>
        {
            if(chunks_per_id.has(log_chunk_document.id))
            {
                let chunks = chunks_per_id.get(log_chunk_document.id);
                if(!log_chunk_document.hasOwnProperty('chunkId'))
                {
                    throw new Error('Log database object ID ' + log_chunk_document.id + ' has no chunk field');
                }
                // expand chunks array with holes when needed (holes shall vanish before the end of the process)
                while(log_chunk_document.chunkId > chunks.length) chunks.push(undefined);
                chunks[log_chunk_document.chunkId] = log_chunk_document;
                delete log_chunk_document._id; // keep only id (fix: previously only done for the first seen chunk, leaking _id into the output when chunk 0 arrived late)
            }
            else
            {
                let chunks = [];
                if(log_chunk_document.hasOwnProperty('chunkId'))
                {
                    // expand chunks array with holes (holes shall vanish before the end of the process)
                    while(log_chunk_document.chunkId > chunks.length) chunks.push(undefined);
                }
                chunks.push(log_chunk_document);
                const id = log_chunk_document.hasOwnProperty('id') ? log_chunk_document.id : (log_chunk_document.id = log_chunk_document._id.toHexString()); // uses _id instead of id for very old databases, and replicates it in id
                delete log_chunk_document._id; // keep only id
                chunks_per_id.set(id, chunks);
            }
        });
        // for each ID, check chunk integrity and merge the chunks into a single log document
        for(const [id, chunks] of chunks_per_id)
        {
            // check the integrity of chunks, in particular that there are no holes
            chunks.forEach((chunk) =>
            {
                if(chunk === undefined)
                {
                    throw this.InternalServerError('Log chunks for ID ' + id + ' have holes');
                }
            });
            let log_chunk_document = chunks[0];
            let log_document;
            if(log_chunk_document.hasOwnProperty('content'))
            {
                // merge content of the chunks, then parse the merged content as JSON
                let content = chunks.map((chunk) => chunk.content).join('');
                delete log_chunk_document.content;
                try
                {
                    log_document = JSON.parse(content);
                }
                catch(err)
                {
                    throw this.InternalServerError(err);
                }
            }
            else
            {
                log_document = {};
            }
            // replicate the remaining fields of the first chunk into the log document
            Object.keys(log_chunk_document).forEach((key) =>
            {
                log_document[key] = log_chunk_document[key];
            });
            log_documents.push(log_document);
        }
        if(options.sort !== undefined)
        {
            // compare two values; objects (and nulls) are compared through their JSON representation
            let compare_values = function(v1, v2)
            {
                if((typeof v1 === 'object') || (typeof v2 === 'object'))
                {
                    v1 = JSON.stringify(v1);
                    v2 = JSON.stringify(v2);
                }
                return (v1 < v2) ? -1 : ((v1 > v2) ? 1 : 0);
            };
            // compare two log documents according to the options.sort specification
            let compare_logs = function(log1, log2)
            {
                let r = 0; // fix: was left undefined when options.sort has no keys
                let keys = Object.keys(options.sort);
                for(let i = 0; i < keys.length; ++i)
                {
                    const key = keys[i];
                    const value = options.sort[key];
                    r = compare_values(log1[key], log2[key]);
                    if(Number(value) === -1) r = -r; // -1 (or '-1') means descending order
                    if(r !== 0) break;
                }
                return r;
            };
            // serve the sorted log documents
            resolve(log_documents.sort(compare_logs));
        }
        else
        {
            // serve the log documents
            resolve(log_documents);
        }
    }.bind(this));
}
// Expose postprocess_logs as a named CommonJS export.
module.exports.postprocess_logs = postprocess_logs;