Merge pull request #37 from dbrain/master

Multiprocess (tcp) appender
Gareth Jones 2011-11-02 16:03:53 -07:00
commit 283a403a11
4 changed files with 312 additions and 1 deletion

@@ -160,6 +160,66 @@ This was mainly created for [cluster](https://github.com/LearnBoost/cluster), bu
hook.io appender parameters from log4js-master/worker.json will be passed into the Hook constructor directly, so you can specify hook-port, hook-host etc.
*NOTE* the hook.io appender will currently (and probably indefinitely) explode if you enable hook.io debug, because of the way log4js overrides console.log
## multiprocess (tcp socket) logger
A multiprocess logger has been added to log4js by [dbrain](https://github.com/dbrain). This allows multiple worker processes to log through a single master process, avoiding issues with log rolling etc. in a clustered environment.
This was mainly created for [cluster](https://github.com/LearnBoost/cluster), but you can adapt the example to match your needs, you know, if it fits them.
<pre>
#### log4js-master.json ####
# Will listen for worker connections on loggerPort/loggerHost
{
  "appenders": [{
    "type": "logLevelFilter",
    "level": "DEBUG",
    "appender": {
      "type": "multiprocess",
      "mode": "master",
      "loggerPort": 5001,
      "loggerHost": "simonsaysdie",
      "appender": {
        "type": "file",
        "filename": "muffin.log",
        "maxLogSize": 104857600,
        "backups": 10,
        "pollInterval": 15
      }
    }
  }]
}
#### log4js-worker.json ####
# Will connect to master (tcp server) and send stringified log events
{
  "appenders": [{
    "type": "logLevelFilter",
    "level": "DEBUG",
    "appender": {
      "type": "multiprocess",
      "mode": "worker",
      "loggerPort": 5001,
      "loggerHost": "simonsaysdie"
    }
  }]
}
#### ilikebeans.js ####
var cluster = require('cluster');
var immaCluster = cluster('./doyoulikebeans');
// Perform the one-off log4js configuration depending on the type of cluster process
if (immaCluster.isMaster) {
  require('log4js').configure('log4js-master.json');
} else {
  require('log4js').configure('log4js-worker.json');
}
// Standard cluster startup
immaCluster
  .use(cluster.logger('run/logs'))
  .use(cluster.pidfiles('run/pids'))
  .listen(3000);
</pre>
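The worker module referenced above (./doyoulikebeans) just logs through the normal log4js API; the worker-mode appender takes care of shipping each event to the master over tcp. A minimal sketch of what such a module might look like (the HTTP handler and the 'beans' category are illustrative, not part of this commit):
<pre>
#### doyoulikebeans.js (illustrative sketch) ####
var http = require('http');
var logger = require('log4js').getLogger('beans');

// cluster() expects the worker module to export an http server;
// immaCluster.listen(3000) above binds it to the port.
module.exports = http.createServer(function (req, res) {
  // This event is serialised, sent to the master over tcp and written
  // to muffin.log by the file appender configured there.
  logger.debug('handling ' + req.url);
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  res.end('beans\n');
});
</pre>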
## author (of this node version)
Gareth Jones (csausdev - gareth.jones@sensis.com.au)

@@ -0,0 +1,77 @@
var log4js = require('../log4js');
var layouts = require('../layouts');
var net = require('net');
var util = require('util');
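// Master side: a tcp server that accumulates serialised logging events from each worker
// connection and replays them through the actual appender configured on the master.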
var LogServer = function createLogServer(config) {
  var actualAppender = config.actualAppender;
  var server = net.createServer(function serverCreated(clientSocket) {
    clientSocket.on('connect', function clientConnected() {
      var logMessage = '';
      clientSocket.on('data', function chunkReceived(chunk) {
        logMessage += chunk;
      });
      clientSocket.on('end', function gotChunks() {
        try {
          var loggingEvent = JSON.parse(logMessage);
          deserializeLoggingEvent(loggingEvent);
          actualAppender(loggingEvent);
        } catch (e) {
          // JSON.parse failed, just log the raw contents as an error (probably a naughty client).
          actualAppender(createLoggingEvent('ERROR', 'Unable to parse log: ' + logMessage));
        }
      });
    });
  });
  server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
}
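// Fabricate a minimal logging event so that unparseable payloads still reach the appender as errors.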
function createLoggingEvent(level, message) {
  return {
    startTime: new Date(),
    categoryName: 'log4js',
    level: { toString: function () {
      return level;
    }},
    data: [ message ]
  };
}
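// JSON round-tripping turns startTime into a string and strips level's toString(),
// so restore both before handing the event to the wrapped appender.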
function deserializeLoggingEvent(loggingEvent) {
  loggingEvent.startTime = new Date(loggingEvent.startTime);
  loggingEvent.level.toString = function levelToString() {
    return loggingEvent.level.levelStr;
  };
}
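// Worker side: each logging event opens a short-lived connection to the master,
// writes the event as JSON and ends the socket to mark the message boundary.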
function workerAppender(config) {
  return function log(loggingEvent) {
    var socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
    socket.on('connect', function socketConnected() {
      socket.end(JSON.stringify(loggingEvent), 'utf8');
    });
  };
}
function createAppender(config) {
  if (config.mode === 'master') {
    var server = new LogServer(config);
    return config.actualAppender;
  } else {
    return workerAppender(config);
  }
}
function configure(config) {
  var actualAppender;
  if (config.appender && config.mode === 'master') {
    log4js.loadAppender(config.appender.type);
    actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
    config.actualAppender = actualAppender;
  }
  return createAppender(config);
}
exports.name = 'multiprocess';
exports.appender = createAppender;
exports.configure = configure;

@@ -26,6 +26,7 @@
"devDependencies": {
"vows": ">=0.5.2",
"sandboxed-module": ">= 0.1.1",
"hook.io": "0.7.7"
"hook.io": "0.7.7",
"underscore": "1.2.1"
}
}

@@ -0,0 +1,173 @@
var vows = require('vows');
var assert = require('assert');
var sandbox = require('sandboxed-module');
var _ = require('underscore');
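// Builds the multiprocess appender with sandboxed fakes for net and log4js, recording
// registered socket handlers, connection details and logged events for the assertions below.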
function fancyResultingMultiprocessAppender(opts) {
  var result = { clientOns: {}, serverOns: {}, logged: [], ended: [] };
  var fakeSocket = {
    on: function (event, fn) {
      result.clientOns[event] = fn;
      if (event === 'connect') {
        fn();
      }
    },
    end: function (data, encoding) {
      result.ended.push({ data: data, encoding: encoding });
    }
  };
  var fakeServerSocket = {
    on: function (event, fn) {
      result.serverOns[event] = fn;
      if (event === 'connect') {
        fn();
      }
    }
  };
  var fakeServer = {
    listen: function (port, host) {
      result.listenPort = port;
      result.listenHost = host;
    }
  };
  var fakeNet = {
    createServer: function (fn) {
      fn(fakeServerSocket);
      return fakeServer;
    },
    createConnection: function (port, host) {
      result.connectPort = port;
      result.connectHost = host;
      return fakeSocket;
    }
  };
  var fakeLog4Js = {
    appenderMakers: {}
  };
  fakeLog4Js.loadAppender = function (appender) {
    fakeLog4Js.appenderMakers[appender] = function (config) {
      result.actualLoggerConfig = config;
      return function log(logEvent) {
        result.logged.push(logEvent);
      };
    };
  };
  return { theResult: result,
    theModule: sandbox.require('../lib/appenders/multiprocess', {
      requires: {
        '../log4js': fakeLog4Js,
        'net': fakeNet
      }
    })
  };
}
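// Simulates worker traffic against the fake server socket: each entry in logs is sent as a
// JSON-encoded logging event, while raw entries are sent unparsed to exercise the error path.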
function logMessages(result, logs, raw) {
  logs.forEach(function log(item) {
    var logItem = { startTime: "Wed, 02 Nov 2011 21:46:39 GMT", level: { levelStr: 'DEBUG' }, data: [ item ] };
    result.serverOns.data(JSON.stringify(logItem));
    result.serverOns.end();
    result.serverOns.connect();
  });
  if (raw) {
    raw.forEach(function log(rawItem) {
      result.serverOns.data(rawItem);
      result.serverOns.end();
      result.serverOns.connect();
    });
  }
}
vows.describe('log4js multiprocessAppender').addBatch({
  'master': {
    topic: function() {
      var fancy = fancyResultingMultiprocessAppender();
      var logger = fancy.theModule.configure({ mode: 'master', 'loggerPort': 5001, 'loggerHost': 'abba', appender: { type: 'file' } });
      logMessages(fancy.theResult, [ 'ALRIGHTY THEN', 'OH WOW' ]);
      return fancy.theResult;
    },
    'should write to the actual appender': function (result) {
      assert.equal(result.listenPort, 5001);
      assert.equal(result.listenHost, 'abba');
      assert.equal(result.logged.length, 2);
      assert.equal(result.logged[0].data[0], 'ALRIGHTY THEN');
      assert.equal(result.logged[1].data[0], 'OH WOW');
    },
    'data written should be formatted correctly': function (result) {
      assert.equal(result.logged[0].level.toString(), 'DEBUG');
      assert.equal(result.logged[0].data, 'ALRIGHTY THEN');
      assert.isTrue(typeof(result.logged[0].startTime) === 'object');
      assert.equal(result.logged[1].level.toString(), 'DEBUG');
      assert.equal(result.logged[1].data, 'OH WOW');
      assert.isTrue(typeof(result.logged[1].startTime) === 'object');
    },
    'the actual logger should get the right config': function (result) {
      assert.equal(result.actualLoggerConfig.type, 'file');
    },
    'client should not be called': function (result) {
      assert.equal(_.keys(result.clientOns).length, 0);
    }
  },
  'master with bad request': {
    topic: function() {
      var fancy = fancyResultingMultiprocessAppender();
      var logger = fancy.theModule.configure({ mode: 'master', 'loggerPort': 5001, 'loggerHost': 'abba', appender: { type: 'file' } });
      logMessages(fancy.theResult, [], [ 'ALRIGHTY THEN', 'OH WOW' ]);
      return fancy.theResult;
    },
    'should write to the actual appender': function (result) {
      assert.equal(result.listenPort, 5001);
      assert.equal(result.listenHost, 'abba');
      assert.equal(result.logged.length, 2);
      assert.equal(result.logged[0].data[0], 'Unable to parse log: ALRIGHTY THEN');
      assert.equal(result.logged[1].data[0], 'Unable to parse log: OH WOW');
    },
    'data written should be formatted correctly': function (result) {
      assert.equal(result.logged[0].level.toString(), 'ERROR');
      assert.equal(result.logged[0].data, 'Unable to parse log: ALRIGHTY THEN');
      assert.isTrue(typeof(result.logged[0].startTime) === 'object');
      assert.equal(result.logged[1].level.toString(), 'ERROR');
      assert.equal(result.logged[1].data, 'Unable to parse log: OH WOW');
      assert.isTrue(typeof(result.logged[1].startTime) === 'object');
    }
  },
  'worker': {
    'should emit logging events to the master': {
      topic: function() {
        var fancy = fancyResultingMultiprocessAppender();
        var logger = fancy.theModule.configure({ loggerHost: 'baba', loggerPort: 1232, name: 'ohno', mode: 'worker', appender: { type: 'file' } });
        logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' });
        logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z' });
        return fancy.theResult;
      },
      'client configuration should be correct': function (result) {
        assert.equal(result.connectHost, 'baba');
        assert.equal(result.connectPort, 1232);
      },
      'should not write to the actual appender': function (result) {
        assert.equal(result.logged.length, 0);
        assert.equal(result.ended.length, 2);
        assert.equal(result.ended[0].data, JSON.stringify({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' }));
        assert.equal(result.ended[0].encoding, 'utf8');
        assert.equal(result.ended[1].data, JSON.stringify({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z' }));
        assert.equal(result.ended[1].encoding, 'utf8');
        assert.equal(_.keys(result.serverOns).length, 0);
      }
    }
  }
}).export(module);