Implement batch logger to log query times when queries are defined with id
parent 89f9a99e44
commit aa0ce62a85
2 app.js
@@ -99,6 +99,8 @@ process.on('SIGHUP', function() {
         global.logger = global.log4js.getLogger();
         console.log('Log files reloaded');
     });
+
+    server.batch.logger.reopenFileStreams();
 });
 
 process.on('SIGTERM', function () {
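The `reopenFileStreams()` call above hooks the batch logger into the same SIGHUP-driven log rotation the server already uses for log4js. As a minimal self-contained sketch of that bunyan pattern (the file path and record values are illustrative, not taken from this commit):

```js
'use strict';

var bunyan = require('bunyan');

// A file-backed bunyan logger; bunyan keeps track of streams it opened from a path.
var logger = bunyan.createLogger({
    name: 'batch-queries',
    streams: [{ level: 'info', path: '/tmp/batch-queries.log' }]
});

// After an external tool (e.g. logrotate) moves the file aside, a SIGHUP
// tells bunyan to close the old descriptor and reopen the original path.
process.on('SIGHUP', function () {
    logger.reopenFileStreams();
});

logger.info({ elapsedTime: 2500 }, 'illustrative record');
```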
86 batch/batch-logger.js (new file)
@@ -0,0 +1,86 @@
+'use strict';
+
+var bunyan = require('bunyan');
+var fs = require('fs');
+
+var debug = require('./util/debug')('batch-logger');
+
+var JobUtils = require('./models/job_state_machine');
+var jobUtils = new JobUtils();
+var JobFallback = require('./models/job_fallback');
+
+function BatchLogger (path) {
+    this.path = path;
+    this.stream = this.path ? fs.createWriteStream(this.path, { flags: 'a', encoding: 'utf8' }) : process.stdout;
+    this.logger = bunyan.createLogger({
+        name: 'batch-queries',
+        streams: [{
+            level: 'info',
+            stream: this.stream
+        }]
+    });
+}
+
+module.exports = BatchLogger;
+
+BatchLogger.prototype.log = function (job) {
+    if (!isFinished(job)) {
+        return;
+    }
+
+    var queries = job.data.query.query;
+
+    for (var i = 0; i < queries.length; i++) {
+        var query = queries[i];
+
+        if (!query.id) {
+            continue;
+        }
+
+        var node = parseQueryId(query.id);
+        var output = {
+            username: job.data.user,
+            job: job.data.job_id,
+            analysis: node.analysis,
+            node: node.id,
+            type: node.type,
+            elapsedTime: calculateElapsedTime(query.started_at, query.ended_at)
+        };
+
+        debug('analysis %j', output);
+
+        this.logger.info(output);
+    }
+};
+
+function isFinished (job) {
+    return job instanceof JobFallback &&
+        jobUtils.isFinalStatus(job.data.status) &&
+        (!job.data.fallback_status || jobUtils.isFinalStatus(job.data.fallback_status));
+}
+
+BatchLogger.prototype.reopenFileStreams = function () {
+    this.logger.reopenFileStreams();
+};
+
+function parseQueryId (queryId) {
+    var data = queryId.split(':');
+
+    return {
+        analysis: data[0],
+        id: data[1],
+        type: data[2]
+    };
+}
+
+function calculateElapsedTime (started_at, ended_at) {
+    if (!started_at || !ended_at) {
+        return;
+    }
+
+    var start = new Date(started_at);
+    var end = new Date(ended_at);
+    var elapsedTimeMilliseconds = end.getTime() - start.getTime();
+
+    return elapsedTimeMilliseconds;
+}
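Putting the new file together: `log()` only writes entries for jobs in a final status, and only for queries that carry an `id`, which it splits as `<analysis>:<node>:<type>`. A rough usage sketch under those assumptions (the require path and the job object below are illustrative; in the real flow the job is a `JobFallback` instance handed over by the batch consumer):

```js
'use strict';

var BatchLogger = require('./batch/batch-logger');

// Pass a file path to append JSON lines there; omit it to log to stdout.
var logger = new BatchLogger('/tmp/batch-queries.log');

// Illustrative payload: each query id follows the "<analysis>:<node>:<type>"
// convention parseQueryId() expects, plus start/end timestamps.
var job = {
    data: {
        user: 'some_user',
        job_id: '00000000-0000-0000-0000-000000000000',
        status: 'done',
        query: {
            query: [{
                id: 'a0:n1:buffer',
                started_at: '2016-07-01T10:00:00.000Z',
                ended_at: '2016-07-01T10:00:02.500Z'
            }]
        }
    }
};

// In the batch consumer this is self.logger.log(job). Note the guard in
// log() also requires a JobFallback instance, so this plain object would
// be skipped in practice; it only shows the expected shape of the data.
logger.log(job);
```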
5 batch/batch.js
@@ -7,12 +7,13 @@ var forever = require('./util/forever');
 var queue = require('queue-async');
 var jobStatus = require('./job_status');
 
-function Batch(jobSubscriber, jobQueuePool, jobRunner, jobService) {
+function Batch(jobSubscriber, jobQueuePool, jobRunner, jobService, logger) {
     EventEmitter.call(this);
     this.jobSubscriber = jobSubscriber;
     this.jobQueuePool = jobQueuePool;
     this.jobRunner = jobRunner;
     this.jobService = jobService;
+    this.logger = logger;
 }
 util.inherits(Batch, EventEmitter);
 
@@ -90,6 +91,8 @@ Batch.prototype._consumeJobs = function (host, queue, callback) {
             debug('Job %s %s in %s', job_id, job.data.status, host);
         }
 
+        self.logger.log(job);
+
         self.emit('job:' + job.data.status, job_id);
 
         callback();
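With `self.logger.log(job)` in place, every finished job with id-tagged queries ends up as one bunyan JSON line per query in the configured stream. Roughly what one record looks like, shown as an annotated object with made-up values (bunyan also adds `hostname` and `pid` fields):

```js
'use strict';

// Hypothetical record, pretty-printed here; the real output is a single
// JSON line appended to the stream configured in BatchLogger.
var sampleRecord = {
    name: 'batch-queries',            // logger name set in BatchLogger
    level: 30,                        // bunyan's numeric code for "info"
    username: 'some_user',            // job.data.user
    job: '00000000-0000-0000-0000-000000000000', // job.data.job_id
    analysis: 'a0',                   // first segment of query.id
    node: 'n1',                       // second segment
    type: 'buffer',                   // third segment
    elapsedTime: 2500,                // ended_at - started_at, in milliseconds
    msg: '',                          // empty: only fields are logged
    time: '2016-07-01T10:00:02.500Z',
    v: 0                              // bunyan log format version
};

console.log(JSON.stringify(sampleRecord));
```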
7 batch/index.js
@@ -1,6 +1,5 @@
 'use strict';
 
-
 var RedisPool = require('redis-mpool');
 var _ = require('underscore');
 var JobRunner = require('./job_runner');
@@ -14,9 +13,10 @@ var JobPublisher = require('./job_publisher');
 var JobQueue = require('./job_queue');
 var JobBackend = require('./job_backend');
 var JobService = require('./job_service');
+var BatchLogger = require('./batch-logger');
 var Batch = require('./batch');
 
-module.exports = function batchFactory (metadataBackend, redisConfig, statsdClient) {
+module.exports = function batchFactory (metadataBackend, redisConfig, statsdClient, loggerPath) {
     var redisPoolSubscriber = new RedisPool(_.extend(redisConfig, { name: 'batch-subscriber'}));
     var redisPoolPublisher = new RedisPool(_.extend(redisConfig, { name: 'batch-publisher'}));
     var queueSeeker = new QueueSeeker(metadataBackend);
@@ -30,6 +30,7 @@ module.exports = function batchFactory (metadataBackend, redisConfig, statsdClie
     var jobCanceller = new JobCanceller(userDatabaseMetadataService);
     var jobService = new JobService(jobBackend, jobCanceller);
     var jobRunner = new JobRunner(jobService, jobQueue, queryRunner, statsdClient);
+    var logger = new BatchLogger(loggerPath);
 
-    return new Batch(jobSubscriber, jobQueuePool, jobRunner, jobService);
+    return new Batch(jobSubscriber, jobQueuePool, jobRunner, jobService, logger);
 };
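Since `batchFactory` now takes a `loggerPath`, the caller decides where query timings go; passing a falsy path makes BatchLogger fall back to `process.stdout`. A minimal sketch of a call site (the `batch_log_filename` setting name and the wrapper function are assumptions for illustration, not part of this commit):

```js
'use strict';

var batchFactory = require('./batch');

// Hypothetical helper: the app's existing metadataBackend, redis config and
// statsd client are passed through unchanged; only the fourth argument is new.
function createBatchService(metadataBackend, redisConfig, statsdClient, settings) {
    return batchFactory(
        metadataBackend,
        redisConfig,
        statsdClient,
        settings.batch_log_filename // assumed setting name; falsy => stdout
    );
}

module.exports = createBatchService;
```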
1 package.json
@@ -17,6 +17,7 @@
     "Sandro Santilli <strk@vizzuality.com>"
   ],
   "dependencies": {
+    "bunyan": "1.8.1",
     "cartodb-psql": "~0.6.0",
     "cartodb-query-tables": "0.2.0",
     "cartodb-redis": "0.13.1",