Source: core/preprocess_logs.js

/*
 * This file is part of PKM (Persistent Knowledge Monitor).
 * Copyright (c) 2020 Capgemini Group, Commissariat à l'énergie atomique et aux énergies alternatives,
 *                    OW2, Sysgo AG, Technikon, Tree Technology, Universitat Politècnica de València.
 * 
 * PKM is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License version 3 as published by
 * the Free Software Foundation.
 * 
 * PKM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 * 
 * You should have received a copy of the GNU Affero General Public License
 * along with PKM.  If not, see <https://www.gnu.org/licenses/>.
 */

/** Preprocess log documents, splitting oversized logs into chunk documents
 * 
 * @memberof PKM
 * @instance
 * @param {Array.<Object>} log_documents - log documents to preprocess (their date fields are normalised in place)
 * @param {Object} [options] - options (currently unused)
 * 
 * @return {Promise<Array.<Object>>} a promise resolving to the preprocessed log documents, with oversized logs replaced by chunk documents
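 * 
 * @example
 * // Illustrative usage only: `pkm` stands for a PKM instance, since this method
 * // reads LogChunkSizeThreshold, BadRequest and Error through `this`
 * pkm.preprocess_logs([ { name : 'example run', messages : 'very long output ...' } ])
 *     .then((log_chunk_documents) => console.log(log_chunk_documents.length))
 *     .catch((err) => console.error(err));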
 */
function preprocess_logs(log_documents, options = {})
{
	return new Promise(function(resolve, reject)
	{
		let log_chunk_documents = [];
	
		try
		{
			log_documents.forEach((log_document) =>
			{
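				// Normalise 'start running time' and 'end running time' into Date objects;
				// an unparsable date aborts preprocessing with a BadRequest error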
				[ 'start running time', 'end running time' ].forEach((key) =>
				{
					if(log_document.hasOwnProperty(key) && !(log_document[key] instanceof Date))
					{
						let date = new Date(log_document[key]);
						if(isNaN(date.getTime()))
						{
							throw this.BadRequest('Invalid date format for \'' + key + '\'');
						}
						log_document[key] = date;
					}
				});
				
				// Gather the bulky payload fields into a separate object so that their serialised size can be measured
				let raw_log_document = {};
					
				[ 'messages', 'warnings', 'errors', 'details' ].forEach((key) =>
				{
					if(log_document.hasOwnProperty(key))
					{
						raw_log_document[key] = log_document[key];
					}
				});
			
				// Serialise the payload to compare its size against the chunk size threshold
				let content = JSON.stringify(raw_log_document);
				
				if(content.length > this.LogChunkSizeThreshold)
				{
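					// Split the serialised content into LogChunkSizeThreshold-sized slices,
					// copying every remaining metadata field onto each chunk document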
					let chunk_id = 0;
					let offset = 0;
					
					while(offset < content.length)
					{
						let log_chunk_document =
						{
							chunkId : chunk_id,
							content : content.slice(offset, offset + this.LogChunkSizeThreshold),
						};
						
						Object.keys(log_document).forEach((key) =>
						{
							if(![ 'messages', 'warnings', 'errors', 'details', 'content' ].includes(key))
							{
								log_chunk_document[key] = log_document[key];
							}
						});
					
						log_chunk_documents.push(log_chunk_document);
						offset += this.LogChunkSizeThreshold;
						++chunk_id;
					}
				}
				else
				{
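					// At or below the threshold: keep the log document whole, without chunking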
					log_chunk_documents.push(log_document);
				}
			});
		}
		catch(err)
		{
			reject(this.Error(err));
			return;
		}
		
		resolve(log_chunk_documents);
	}.bind(this));
}

module.exports.preprocess_logs = preprocess_logs;