Merge pull request #1181 from CartoDB/feature/ch88712/node-windshaft-metro-service-is-not-started

Fix: TypeError: Cannot read property 'level' of undefined
This commit is contained in:
Daniel G. Aubert 2020-07-01 08:58:26 +02:00 committed by GitHub
commit cb17bba3f5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 64 additions and 18 deletions

View File

@@ -4,8 +4,26 @@ const split = require('split2');
const logCollector = require('./log-collector'); const logCollector = require('./log-collector');
const metricsCollector = require('./metrics-collector'); const metricsCollector = require('./metrics-collector');
process.stdin const streams = [process.stdin, split(), logCollector(), metricsCollector(), process.stdout]
.pipe(split())
.pipe(logCollector()) pipeline('pipe', streams);
.pipe(metricsCollector())
.pipe(process.stdout); process.on('SIGINT', exitProcess(0));
process.on('SIGTERM', exitProcess(0));
process.on('uncaughtException', exitProcess(1));
process.on('unhandledRejection', exitProcess(1));
// Wire (or unwire) a chain of streams by invoking `action` — 'pipe' or
// 'unpipe' — on each adjacent pair: streams[0] -> streams[1] -> … -> last.
// With fewer than two streams there are no pairs, so nothing happens.
function pipeline (action, streams) {
    streams.slice(0, -1).forEach((source, i) => {
        source[action](streams[i + 1]);
    });
}
// Build a signal/event handler that tears down the stream chain and then
// terminates the process with `code` (default 0). Unpiping first gives any
// 'unpipe' listeners on the streams (e.g. the log-collector's dump logic)
// a chance to run before exit.
function exitProcess (code = 0) {
    return (signal) => {
        pipeline('unpipe', streams);
        process.exit(code);
    };
}

View File

@@ -1,10 +1,13 @@
'use strict' 'use strict'
const fs = require('fs');
const split = require('split2'); const split = require('split2');
const assingDeep = require('assign-deep'); const assingDeep = require('assign-deep');
const { Transform } = require('stream'); const { Transform } = require('stream');
const DEV_ENVS = ['test', 'development']; const DEV_ENVS = ['test', 'development'];
const logs = new Map(); const dumpPath = `${__dirname}/dump.json`;
let logs;
const LEVELS = { const LEVELS = {
10: 'trace', 10: 'trace',
@@ -16,7 +19,7 @@ const LEVELS = {
} }
module.exports = function logCollector () { module.exports = function logCollector () {
return new Transform({ const stream = new Transform({
transform (chunk, enc, callback) { transform (chunk, enc, callback) {
let entry; let entry;
@@ -34,7 +37,7 @@ module.exports = function logCollector () {
return callback(); return callback();
} }
const { id, end } = entry; const { id } = entry;
if (id === undefined) { if (id === undefined) {
entry.level = LEVELS[entry.level]; entry.level = LEVELS[entry.level];
@@ -42,17 +45,17 @@ module.exports = function logCollector () {
return callback(); return callback();
} }
if (end === true) {
const accEntry = logs.get(id);
accEntry.level = LEVELS[accEntry.level];
accEntry.time = entry.time;
this.push(`${JSON.stringify(accEntry)}\n`);
logs.delete(id);
return callback();
}
if (logs.has(id)) { if (logs.has(id)) {
const accEntry = logs.get(id); const accEntry = logs.get(id);
const { end } = entry;
if (end === true) {
accEntry.level = LEVELS[accEntry.level];
accEntry.time = entry.time;
this.push(`${JSON.stringify(accEntry)}\n`);
logs.delete(id);
return callback();
}
if (accEntry.level > entry.level) { if (accEntry.level > entry.level) {
delete entry.level delete entry.level
@@ -70,7 +73,32 @@ module.exports = function logCollector () {
callback(); callback();
} }
}) });
stream.on('pipe', () => {
if (!fs.existsSync(dumpPath)) {
logs = new Map();
return;
}
try {
const dump = require(dumpPath);
logs = new Map(dump);
} catch (err) {
console.error(`Cannot read the dump for unfinished logs: ${err}`);
logs = new Map();
}
});
stream.on('unpipe', () => {
try {
fs.writeFileSync(dumpPath, JSON.stringify([...logs]));
} catch (err) {
console.error(`Cannot create a dump for unfinished logs: ${err}`);
}
});
return stream;
} }
function hasProperty(obj, prop) { function hasProperty(obj, prop) {