Merge pull request #1 from CartoDB/master

Upstream update, adds x-forwarded-for by default
Commit: 145d5f91e3
@@ -1,5 +1,6 @@
 language: node_js
 node_js:
+- "0.12"
 - "0.10"
 - "0.8"
 
@@ -108,8 +108,9 @@ For FileAppender you can also pass the path to the log directory as an option wh
 log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
 ```
 If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. Here is an example configuration file:
-```json
 #### my_log4js_configuration.json ####
+```json
 {
 "appenders": [
 {
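For reference, a minimal sketch of how the `cwd` option and a per-appender `"absolute": true` flag interact, assuming the file appender layout the README describes (paths and file names are illustrative):

```js
var log4js = require('log4js');

// Relative filenames are resolved against options.cwd;
// an appender marked "absolute": true keeps its own path untouched.
log4js.configure({
  appenders: [
    { type: "file", filename: "relative.log" },                      // -> /var/log/myapp/relative.log
    { type: "file", filename: "/tmp/absolute.log", absolute: true }  // -> stays /tmp/absolute.log
  ]
}, { cwd: '/var/log/myapp' });
```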
@@ -35,11 +35,13 @@ logger.setLevel('ERROR');
 //console logging methods have been replaced with log4js ones.
 //so this will get coloured output on console, and appear in cheese.log
 console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" });
+console.log("This should appear as info output");
 
 //these will not appear (logging level beneath error)
 logger.trace('Entering cheese testing');
 logger.debug('Got cheese.');
 logger.info('Cheese is Gouda.');
+logger.log('Something funny about cheese.');
 logger.warn('Cheese is quite smelly.');
 //these end up on the console and in cheese.log
 logger.error('Cheese %s is too ripe!', "gouda");
@@ -87,6 +87,15 @@ function createAppender(config) {
 // console.log("master : " + cluster.isMaster + " received message: " + JSON.stringify(message.event));
 
 var loggingEvent = deserializeLoggingEvent(message.event);
+
+// Adding PID metadata
+loggingEvent.pid = worker.process.pid;
+loggingEvent.cluster = {
+master: process.pid,
+worker: worker.process.pid,
+workerId: worker.id
+};
+
 masterAppender(loggingEvent);
 }
 });
@@ -2,10 +2,10 @@
 var layouts = require('../layouts')
 , consoleLog = console.log.bind(console);
 
-function consoleAppender (layout) {
+function consoleAppender (layout, timezoneOffset) {
 layout = layout || layouts.colouredLayout;
 return function(loggingEvent) {
-consoleLog(layout(loggingEvent));
+consoleLog(layout(loggingEvent, timezoneOffset));
 };
 }
 
@@ -14,7 +14,7 @@ function configure(config) {
 if (config.layout) {
 layout = layouts.layout(config.layout.type, config.layout);
 }
-return consoleAppender(layout);
+return consoleAppender(layout, config.timezoneOffset);
 }
 
 exports.appender = consoleAppender;
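A sketch of how the new `timezoneOffset` option reaches the console appender through `configure`; the value is in minutes and follows `Date#getTimezoneOffset()` semantics (minutes behind UTC), so a negative number means east of UTC:

```js
var log4js = require('log4js');

log4js.configure({
  appenders: [
    // -600 renders timestamps at UTC+10 regardless of the host timezone
    { type: "console", timezoneOffset: -600 }
  ]
});

log4js.getLogger('tz-demo').info('timestamp uses the configured offset');
```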
@@ -20,8 +20,9 @@ process.on('exit', function() {
 * @pattern the format that will be added to the end of filename when rolling,
 * also used to check when to roll files - defaults to '.yyyy-MM-dd'
 * @layout layout function for log messages - defaults to basicLayout
+* @timezoneOffset optional timezone offset in minutes - defaults to system local
 */
-function appender(filename, pattern, alwaysIncludePattern, layout) {
+function appender(filename, pattern, alwaysIncludePattern, layout, timezoneOffset) {
 layout = layout || layouts.basicLayout;
 
 var logFile = new streams.DateRollingFileStream(
@@ -32,7 +33,7 @@ function appender(filename, pattern, alwaysIncludePattern, layout) {
 openFiles.push(logFile);
 
 return function(logEvent) {
-logFile.write(layout(logEvent) + eol, "utf8");
+logFile.write(layout(logEvent, timezoneOffset) + eol, "utf8");
 };
 
 }
@@ -52,7 +53,7 @@ function configure(config, options) {
 config.filename = path.join(options.cwd, config.filename);
 }
 
-return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout);
+return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout, config.timezoneOffset);
 }
 
 function shutdown(cb) {
@@ -6,7 +6,8 @@ var layouts = require('../layouts')
 , streams = require('../streams')
 , os = require('os')
 , eol = os.EOL || '\n'
-, openFiles = [];
+, openFiles = []
+, levels = require('../levels');
 
 //close open files on process exit.
 process.on('exit', function() {
@@ -25,8 +26,10 @@ process.on('exit', function() {
 * if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize
 * has been reached (default 5)
+* @param compress - flag that controls log file compression
+* @param timezoneOffset - optional timezone offset in minutes (default system local)
 */
-function fileAppender (file, layout, logSize, numBackups) {
+function fileAppender (file, layout, logSize, numBackups, compress, timezoneOffset) {
 var bytesWritten = 0;
 file = path.normalize(file);
 layout = layout || layouts.basicLayout;
@@ -40,7 +43,8 @@ function fileAppender (file, layout, logSize, numBackups) {
 stream = new streams.RollingFileStream(
 file,
 fileSize,
-numFiles
+numFiles,
+{ "compress": compress }
 );
 } else {
 stream = fs.createWriteStream(
@@ -62,8 +66,9 @@ function fileAppender (file, layout, logSize, numBackups) {
 openFiles.push(logFile);
 
 return function(loggingEvent) {
-logFile.write(layout(loggingEvent) + eol, "utf8");
+logFile.write(layout(loggingEvent, timezoneOffset) + eol, "utf8");
 };
 
 }
 
 function configure(config, options) {
@@ -76,7 +81,7 @@ function configure(config, options) {
 config.filename = path.join(options.cwd, config.filename);
 }
 
-return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
+return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.compress, config.timezoneOffset);
 }
 
 function shutdown(cb) {
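A hedged sketch of a file appender configuration exercising the new `compress` flag together with `timezoneOffset` (file name and sizes are illustrative; they mirror the test added at the end of this diff):

```js
var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: "file",
      filename: "app.log",
      maxLogSize: 1048576,   // roll after roughly 1 MB
      backups: 2,            // keep app.log.1.gz and app.log.2.gz
      compress: true,        // gzip rolled files (see the RollingFileStream change below)
      timezoneOffset: 0      // render timestamps at UTC
    }
  ]
});

log4js.getLogger().info('written through the rolling, compressing file appender');
```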
@@ -127,8 +127,10 @@ RollingFileSync.prototype.write = function(chunk, encoding) {
 * if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize
 * has been reached (default 5)
+* @param timezoneOffset - optional timezone offset in minutes
+* (default system local)
 */
-function fileAppender (file, layout, logSize, numBackups) {
+function fileAppender (file, layout, logSize, numBackups, timezoneOffset) {
 debug("fileSync appender created");
 var bytesWritten = 0;
 file = path.normalize(file);
@@ -166,7 +168,7 @@ function fileAppender (file, layout, logSize, numBackups) {
 var logFile = openTheStream(file, logSize, numBackups);
 
 return function(loggingEvent) {
-logFile.write(layout(loggingEvent) + eol);
+logFile.write(layout(loggingEvent, timezoneOffset) + eol);
 };
 }
 
@@ -180,7 +182,7 @@ function configure(config, options) {
 config.filename = path.join(options.cwd, config.filename);
 }
 
-return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
+return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.timezoneOffset);
 }
 
 exports.appender = fileAppender;
@@ -85,6 +85,8 @@ function gelfAppender (layout, host, port, hostname, facility) {
 var firstData = data[0];
 
 if (!firstData.GELF) return; // identify with GELF field defined
+// Remove the GELF key, some gelf supported logging systems drop the message with it
+delete firstData.GELF;
 Object.keys(firstData).forEach(function(key) {
 // skip _id field for graylog2, skip keys not starts with UNDERSCORE
 if (key.match(/^_/) || key !== "_id") {
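A sketch of the logging call this code path appears to handle: the leading object carries GELF custom fields (underscore-prefixed), and the `GELF` marker itself is now deleted before the message is sent so stricter servers do not drop it (field names are illustrative, borrowed from the test further down):

```js
var log4js = require('log4js');
var logger = log4js.getLogger('gelf-demo');

// First argument: GELF metadata; remaining arguments: the actual message.
logger.info({ GELF: true, _facility: 'nonsense', _every2: 'Overwritten!' },
            'the log message itself');
```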
@@ -1,7 +1,10 @@
 "use strict";
 var layouts = require("../layouts")
 , mailer = require("nodemailer")
-, os = require('os');
+, os = require('os')
+, async = require('async')
+, unsentCount = 0
+, shutdownTimeout;
 
 /**
 * SMTP Appender. Sends logging events using SMTP protocol.
@@ -11,6 +14,7 @@ var layouts = require("../layouts")
 * @param config appender configuration data
 * config.sendInterval time between log emails (in seconds), if 0
 * then every event sends an email
+* config.shutdownTimeout time to give up remaining emails (in seconds; defaults to 5).
 * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
 */
 function smtpAppender(config, layout) {
@@ -21,22 +25,31 @@ function smtpAppender(config, layout) {
 var logEventBuffer = [];
 var sendTimer;
 
+shutdownTimeout = ('shutdownTimeout' in config ? config.shutdownTimeout : 5) * 1000;
 
 function sendBuffer() {
 if (logEventBuffer.length > 0) {
 
-var transport = mailer.createTransport(config.transport, config[config.transport]);
+var transport = mailer.createTransport(config.SMTP);
 var firstEvent = logEventBuffer[0];
 var body = "";
+var count = logEventBuffer.length;
 while (logEventBuffer.length > 0) {
-body += layout(logEventBuffer.shift()) + "\n";
+body += layout(logEventBuffer.shift(), config.timezoneOffset) + "\n";
 }
 
 var msg = {
 to: config.recipients,
 subject: config.subject || subjectLayout(firstEvent),
-text: body,
 headers: { "Hostname": os.hostname() }
 };
 
+if (!config.html) {
+msg.text = body;
+} else {
+msg.html = body;
+}
 
 if (config.sender) {
 msg.from = config.sender;
 }
@@ -45,6 +58,7 @@ function smtpAppender(config, layout) {
 console.error("log4js.smtpAppender - Error happened", error);
 }
 transport.close();
+unsentCount -= count;
 });
 }
 }
@@ -59,6 +73,7 @@ function smtpAppender(config, layout) {
 }
 
 return function(loggingEvent) {
+unsentCount++;
 logEventBuffer.push(loggingEvent);
 if (sendInterval > 0) {
 scheduleSend();
@@ -76,7 +91,19 @@ function configure(config) {
 return smtpAppender(config, layout);
 }
 
+function shutdown(cb) {
+if (shutdownTimeout > 0) {
+setTimeout(function() { unsentCount = 0; }, shutdownTimeout);
+}
+async.whilst(function() {
+return unsentCount > 0;
+}, function(done) {
+setTimeout(done, 100);
+}, cb);
+}
 
 exports.name = "smtp";
 exports.appender = smtpAppender;
 exports.configure = configure;
+exports.shutdown = shutdown;
 
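A hedged sketch of an SMTP appender configuration exercising the new options (`SMTP` transport settings are illustrative and nodemailer must be available):

```js
var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: "smtp",
      recipients: "ops@example.com",
      sendInterval: 60,       // batch events and mail them once a minute
      shutdownTimeout: 5,     // seconds to wait for unsent mail on shutdown
      html: true,             // send the batched body as HTML instead of plain text
      timezoneOffset: 0,
      SMTP: { host: "smtp.example.com", port: 25 }
    }
  ]
});

log4js.getLogger('mailer').error('something went wrong');
// log4js.shutdown drains the buffer via the appender's new shutdown hook.
log4js.shutdown(function() { /* safe to exit */ });
```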
@@ -1,5 +1,6 @@
 "use strict";
 var levels = require("./levels");
+var _ = require('underscore');
 var DEFAULT_FORMAT = ':remote-addr - -' +
 ' ":method :url HTTP/:http-version"' +
 ' :status :content-length ":referrer"' +
@@ -88,11 +89,12 @@ function getLogger(logger4js, options) {
 if(res.statusCode >= 400) level = levels.ERROR;
 }
 if (thislogger.isLevelEnabled(level)) {
+var combined_tokens = assemble_tokens(req, res, options.tokens || []);
 if (typeof fmt === 'function') {
-var line = fmt(req, res, function(str){ return format(str, req, res); });
+var line = fmt(req, res, function(str){ return format(str, combined_tokens); });
 if (line) thislogger.log(level, line);
 } else {
-thislogger.log(level, format(fmt, req, res));
+thislogger.log(level, format(fmt, combined_tokens));
 }
 }
 });
@@ -103,6 +105,52 @@ function getLogger(logger4js, options) {
 };
 }
 
+/**
+* Adds custom {token, replacement} objects to defaults, overwriting the defaults if any tokens clash
+*
+* @param {IncomingMessage} req
+* @param {ServerResponse} res
+* @param {Array} custom_tokens [{ token: string-or-regexp, replacement: string-or-replace-function }]
+* @return {Array}
+*/
+function assemble_tokens(req, res, custom_tokens) {
+var array_unique_tokens = function(array) {
+var a = array.concat();
+for(var i=0; i<a.length; ++i) {
+for(var j=i+1; j<a.length; ++j) {
+if(a[i].token == a[j].token) { // not === because token can be regexp object
+a.splice(j--, 1);
+}
+}
+}
+return a;
+};
+
+var default_tokens = [];
+default_tokens.push({ token: ':url', replacement: req.originalUrl });
+default_tokens.push({ token: ':method', replacement: req.method });
+default_tokens.push({ token: ':status', replacement: res.__statusCode || res.statusCode });
+default_tokens.push({ token: ':response-time', replacement: res.responseTime });
+default_tokens.push({ token: ':date', replacement: new Date().toUTCString() });
+default_tokens.push({ token: ':referrer', replacement: req.headers.referer || req.headers.referrer || '' });
+default_tokens.push({ token: ':http-version', replacement: req.httpVersionMajor + '.' + req.httpVersionMinor });
+default_tokens.push({ token: ':remote-addr', replacement: req.headers['x-forwarded-for'] || req.ip || req._remoteAddress ||
+(req.socket && (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))) });
+default_tokens.push({ token: ':user-agent', replacement: req.headers['user-agent'] });
+default_tokens.push({ token: ':content-length', replacement: (res._headers && res._headers['content-length']) ||
+(res.__headers && res.__headers['Content-Length']) || '-' });
+default_tokens.push({ token: /:req\[([^\]]+)\]/g, replacement: function(_, field) {
+return req.headers[field.toLowerCase()];
+} });
+default_tokens.push({ token: /:res\[([^\]]+)\]/g, replacement: function(_, field) {
+return res._headers ?
+(res._headers[field.toLowerCase()] || res.__headers[field])
+: (res.__headers && res.__headers[field]);
+} });
+
+return array_unique_tokens(custom_tokens.concat(default_tokens));
+};
+
 /**
 * Return formatted log line.
 *
@@ -113,33 +161,10 @@ function getLogger(logger4js, options) {
 * @api private
 */
 
-function format(str, req, res) {
-return str
-.replace(':url', req.originalUrl)
-.replace(':method', req.method)
-.replace(':status', res.__statusCode || res.statusCode)
-.replace(':response-time', res.responseTime)
-.replace(':date', new Date().toUTCString())
-.replace(':referrer', req.headers.referer || req.headers.referrer || '')
-.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
-.replace(
-':remote-addr', req.ip || req._remoteAddress || (
-req.socket &&
-(req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))
-))
-.replace(':user-agent', req.headers['user-agent'] || '')
-.replace(
-':content-length',
-(res._headers && res._headers['content-length']) ||
-(res.__headers && res.__headers['Content-Length']) ||
-'-'
-)
-.replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; })
-.replace(/:res\[([^\]]+)\]/g, function(_, field){
-return res._headers ?
-(res._headers[field.toLowerCase()] || res.__headers[field])
-: (res.__headers && res.__headers[field]);
-});
+function format(str, tokens) {
+return _.reduce(tokens, function(current_string, token) {
+return current_string.replace(token.token, token.replacement);
+}, str);
 }
 
 /**
@@ -21,9 +21,9 @@ function addZero(vNumber) {
 * Thanks to http://www.svendtofte.com/code/date_format/
 * @private
 */
-function offset(date) {
+function offset(timezoneOffset) {
 // Difference to Greenwich time (GMT) in hours
-var os = Math.abs(date.getTimezoneOffset());
+var os = Math.abs(timezoneOffset);
 var h = String(Math.floor(os/60));
 var m = String(os%60);
 if (h.length == 1) {
@@ -32,26 +32,32 @@ function offset(date) {
 if (m.length == 1) {
 m = "0" + m;
 }
-return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m;
+return timezoneOffset < 0 ? "+"+h+m : "-"+h+m;
 }
 
-exports.asString = function(/*format,*/ date) {
+exports.asString = function(/*format,*/ date, timezoneOffset) {
 var format = exports.ISO8601_FORMAT;
 if (typeof(date) === "string") {
 format = arguments[0];
 date = arguments[1];
+timezoneOffset = arguments[2];
 }
-var vDay = addZero(date.getDate());
-var vMonth = addZero(date.getMonth()+1);
-var vYearLong = addZero(date.getFullYear());
-var vYearShort = addZero(date.getFullYear().toString().substring(2,4));
+// make the date independent of the system timezone by working with UTC
+if (timezoneOffset === undefined) {
+timezoneOffset = date.getTimezoneOffset();
+}
+date.setUTCMinutes(date.getUTCMinutes() - timezoneOffset);
+var vDay = addZero(date.getUTCDate());
+var vMonth = addZero(date.getUTCMonth()+1);
+var vYearLong = addZero(date.getUTCFullYear());
+var vYearShort = addZero(date.getUTCFullYear().toString().substring(2,4));
 var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
-var vHour = addZero(date.getHours());
-var vMinute = addZero(date.getMinutes());
-var vSecond = addZero(date.getSeconds());
-var vMillisecond = padWithZeros(date.getMilliseconds(), 3);
-var vTimeZone = offset(date);
+var vHour = addZero(date.getUTCHours());
+var vMinute = addZero(date.getUTCMinutes());
+var vSecond = addZero(date.getUTCSeconds());
+var vMillisecond = padWithZeros(date.getUTCMilliseconds(), 3);
+var vTimeZone = offset(timezoneOffset);
+date.setUTCMinutes(date.getUTCMinutes() + timezoneOffset);
 var formatted = format
 .replace(/dd/g, vDay)
 .replace(/MM/g, vMonth)
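A small sketch of the reworked helper: the date is shifted to UTC, formatted, then shifted back, so callers can request a fixed offset regardless of the host timezone (the require path assumes log4js's internal file layout):

```js
var dateFormat = require('log4js/lib/date_format');  // internal module, shown for illustration

var when = new Date(Date.UTC(2010, 0, 11, 14, 31, 30, 5));

// Offset is in minutes behind UTC, as Date#getTimezoneOffset() reports it:
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, when, -660);
// -> "2010-01-12T01:31:30+1100" (same instant, rendered 11 hours ahead of UTC)
```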
@@ -71,11 +71,11 @@ function colorize (str, style) {
 return colorizeStart(style) + str + colorizeEnd(style);
 }
 
-function timestampLevelAndCategory(loggingEvent, colour) {
+function timestampLevelAndCategory(loggingEvent, colour, timezoneOffest) {
 var output = colorize(
 formatLogData(
 '[%s] [%s] %s - '
-, dateFormat.asString(loggingEvent.startTime)
+, dateFormat.asString(loggingEvent.startTime, timezoneOffest)
 , loggingEvent.level
 , loggingEvent.categoryName
 )
@@ -93,18 +93,19 @@ function timestampLevelAndCategory(loggingEvent, colour) {
 *
 * @author Stephan Strittmatter
 */
-function basicLayout (loggingEvent) {
-return timestampLevelAndCategory(loggingEvent) + formatLogData(loggingEvent.data);
+function basicLayout (loggingEvent, timezoneOffset) {
+return timestampLevelAndCategory(loggingEvent, undefined, timezoneOffset) + formatLogData(loggingEvent.data);
 }
 
 /**
 * colouredLayout - taken from masylum's fork.
 * same as basicLayout, but with colours.
 */
-function colouredLayout (loggingEvent) {
+function colouredLayout (loggingEvent, timezoneOffset) {
 return timestampLevelAndCategory(
 loggingEvent,
-colours[loggingEvent.level.toString()]
+colours[loggingEvent.level.toString()],
+timezoneOffset
 ) + formatLogData(loggingEvent.data);
 }
 
@@ -139,13 +140,14 @@ function messagePassThroughLayout (loggingEvent) {
 * Takes a pattern string, array of tokens and returns a layout function.
 * @param {String} Log format pattern String
 * @param {object} map object of different tokens
+* @param {number} timezone offset in minutes
 * @return {Function}
 * @author Stephan Strittmatter
 * @author Jan Schmidle
 */
-function patternLayout (pattern, tokens) {
+function patternLayout (pattern, tokens, timezoneOffset) {
 var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
-var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprzx%])(\{([^\}]+)\})?|([^%]+)/;
+var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprzxy%])(\{([^\}]+)\})?|([^%]+)/;
 
 pattern = pattern || TTCC_CONVERSION_PATTERN;
 
@@ -177,7 +179,7 @@ function patternLayout (pattern, tokens) {
 }
 }
 // Format the date
-return dateFormat.asString(format, loggingEvent.startTime);
+return dateFormat.asString(format, loggingEvent.startTime, timezoneOffset);
 }
 
 function hostname() {
@@ -197,7 +199,7 @@ function patternLayout (pattern, tokens) {
 }
 
 function startTime(loggingEvent) {
-return "" + loggingEvent.startTime.toLocaleTimeString();
+return dateFormat.asString('hh:mm:ss', loggingEvent.startTime, timezoneOffset);
 }
 
 function startColour(loggingEvent) {
@@ -212,9 +214,26 @@ function patternLayout (pattern, tokens) {
 return '%';
 }
 
-function pid() {
+function pid(loggingEvent) {
+if (loggingEvent && loggingEvent.pid) {
+return loggingEvent.pid;
+} else {
 return process.pid;
 }
+}
+
+function clusterInfo(loggingEvent, specifier) {
+if (loggingEvent.cluster && specifier) {
+return specifier
+.replace('%m', loggingEvent.cluster.master)
+.replace('%w', loggingEvent.cluster.worker)
+.replace('%i', loggingEvent.cluster.workerId);
+} else if (loggingEvent.cluster) {
+return loggingEvent.cluster.worker+'@'+loggingEvent.cluster.master;
+} else {
+return pid();
+}
+}
 
 function userDefined(loggingEvent, specifier) {
 if (typeof(tokens[specifier]) !== 'undefined') {
@@ -237,6 +256,7 @@ function patternLayout (pattern, tokens) {
 'r': startTime,
 '[': startColour,
 ']': endColour,
+'y': clusterInfo,
 'z': pid,
 '%': percent,
 'x': userDefined
@@ -295,9 +315,7 @@ function patternLayout (pattern, tokens) {
 } else {
 // Create a raw replacement string based on the conversion
 // character and specifier
-var replacement =
-replaceToken(conversionCharacter, loggingEvent, specifier) ||
-matchedString;
+var replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
 
 // Format the replacement according to any padding or
 // truncation specified
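A sketch of a pattern layout using the new `%y` (cluster info) token alongside the reworked `%z` (pid); when the clustered appender attaches worker metadata to the event, `%y` renders `worker@master`:

```js
var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: "console",
      layout: {
        type: "pattern",
        // %d date, %p level, %z pid (worker pid when present), %y worker@master, %m message
        pattern: "%d %p %z %y %m"
      }
    }
  ]
});

log4js.getLogger('cluster-demo').info('hello from this process');
```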
@@ -63,6 +63,7 @@ module.exports = {
 WARN: new Level(30000, "WARN"),
 ERROR: new Level(40000, "ERROR"),
 FATAL: new Level(50000, "FATAL"),
+MARK: new Level(9007199254740992, "MARK"), // 2^53
 OFF: new Level(Number.MAX_VALUE, "OFF"),
 toLevel: toLevel
 };
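With MARK sitting just below OFF, a logger can be silenced for ordinary levels while still emitting deliberate marker lines; a sketch (the explicit `logger.log('MARK', ...)` form relies only on the Logger.log change shown later in this diff):

```js
var log4js = require('log4js');
var logger = log4js.getLogger('lifecycle');

logger.setLevel('MARK');               // suppress everything below MARK
logger.error('not written');           // ERROR < MARK
logger.log('MARK', 'service started'); // MARK events still get through
```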
lib/log4js.js (100 changed lines)
@@ -65,6 +65,8 @@ var events = require('events')
 replaceConsole: false
 };
 
+require('./appenders/console');
+
 function hasLogger(logger) {
 return loggers.hasOwnProperty(logger);
 }
@@ -92,6 +94,22 @@ function getBufferedLogger(categoryName) {
 return logger;
 }
 
+function normalizeCategory (category) {
+return category + '.';
+}
+
+function doesLevelEntryContainsLogger (levelCategory, loggerCategory) {
+var normalizedLevelCategory = normalizeCategory(levelCategory);
+var normalizedLoggerCategory = normalizeCategory(loggerCategory);
+return normalizedLoggerCategory.substring(0, normalizedLevelCategory.length) == normalizedLevelCategory;
+}
+
+function doesAppenderContainsLogger (appenderCategory, loggerCategory) {
+var normalizedAppenderCategory = normalizeCategory(appenderCategory);
+var normalizedLoggerCategory = normalizeCategory(loggerCategory);
+return normalizedLoggerCategory.substring(0, normalizedAppenderCategory.length) == normalizedAppenderCategory;
+}
+
 
 /**
 * Get a logger instance. Instance is cached on categoryName level.
@@ -99,32 +117,51 @@ function getBufferedLogger(categoryName) {
 * @return {Logger} instance of logger for the category
 * @static
 */
-function getLogger (categoryName) {
+function getLogger (loggerCategoryName) {
 
 // Use default logger if categoryName is not specified or invalid
-if (typeof categoryName !== "string") {
-categoryName = Logger.DEFAULT_CATEGORY;
+if (typeof loggerCategoryName !== "string") {
+loggerCategoryName = Logger.DEFAULT_CATEGORY;
+}
+
+if (!hasLogger(loggerCategoryName)) {
+
+var level = undefined;
+
+// If there's a "levels" entry in the configuration
+if (levels.config) {
+// Goes through the categories in the levels configuration entry, starting by the "higher" ones.
+var keys = Object.keys(levels.config).sort();
+for (var idx = 0; idx < keys.length; idx++) {
+var levelCategory = keys[idx];
+if (doesLevelEntryContainsLogger(levelCategory, loggerCategoryName)) {
+// level for the logger
+level = levels.config[levelCategory];
+}
+}
 }
 
-var appenderList;
-if (!hasLogger(categoryName)) {
 // Create the logger for this name if it doesn't already exist
-loggers[categoryName] = new Logger(categoryName);
-if (appenders[categoryName]) {
-appenderList = appenders[categoryName];
+loggers[loggerCategoryName] = new Logger(loggerCategoryName, level);
+var appenderList;
+for(var appenderCategory in appenders) {
+if (doesAppenderContainsLogger(appenderCategory, loggerCategoryName)) {
+appenderList = appenders[appenderCategory];
 appenderList.forEach(function(appender) {
-loggers[categoryName].addListener("log", appender);
+loggers[loggerCategoryName].addListener("log", appender);
 });
 }
+}
 if (appenders[ALL_CATEGORIES]) {
 appenderList = appenders[ALL_CATEGORIES];
 appenderList.forEach(function(appender) {
-loggers[categoryName].addListener("log", appender);
+loggers[loggerCategoryName].addListener("log", appender);
 });
 }
 }
 
-return loggers[categoryName];
+return loggers[loggerCategoryName];
 }
 
 /**
@@ -141,13 +178,19 @@ function addAppender () {
 args = args[0];
 }
 
-args.forEach(function(category) {
-addAppenderToCategory(appender, category);
-if (category === ALL_CATEGORIES) {
+args.forEach(function(appenderCategory) {
+addAppenderToCategory(appender, appenderCategory);
+
+if (appenderCategory === ALL_CATEGORIES) {
 addAppenderToAllLoggers(appender);
-} else if (hasLogger(category)) {
-loggers[category].addListener("log", appender);
+} else {
+for(var loggerCategory in loggers) {
+if (doesAppenderContainsLogger(appenderCategory,loggerCategory)) {
+loggers[loggerCategory].addListener("log", appender);
+}
+}
+
 }
 });
 }
@@ -198,14 +241,19 @@ function configureAppenders(appenderList, options) {
 }
 }
 
-function configureLevels(levels) {
-if (levels) {
-for (var category in levels) {
-if (levels.hasOwnProperty(category)) {
+function configureLevels(_levels) {
+levels.config = _levels; // Keep it so we can create loggers later using this cfg
+if (_levels) {
+var keys = Object.keys(levels.config).sort();
+for (var idx in keys) {
+var category = keys[idx];
 if(category === ALL_CATEGORIES) {
-setGlobalLogLevel(levels[category]);
+setGlobalLogLevel(_levels[category]);
+}
+for(var loggerCategory in loggers) {
+if (doesLevelEntryContainsLogger(category, loggerCategory)) {
+loggers[loggerCategory].setLevel(_levels[category]);
 }
-getLogger(category).setLevel(levels[category]);
 }
 }
 }
@@ -236,8 +284,8 @@ function loadConfigurationFile(filename) {
 function configureOnceOff(config, options) {
 if (config) {
 try {
-configureAppenders(config.appenders, options);
 configureLevels(config.levels);
+configureAppenders(config.appenders, options);
 
 if (config.replaceConsole) {
 replaceConsole();
@@ -253,12 +301,12 @@ function configureOnceOff(config, options) {
 }
 }
 
-function reloadConfiguration() {
+function reloadConfiguration(options) {
 var mtime = getMTime(configState.filename);
 if (!mtime) return;
 
 if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
-configureOnceOff(loadConfigurationFile(configState.filename));
+configureOnceOff(loadConfigurationFile(configState.filename), options);
 }
 configState.lastMTime = mtime;
 }
@@ -280,7 +328,7 @@ function initReloadConfiguration(filename, options) {
 }
 configState.filename = filename;
 configState.lastMTime = getMTime(filename);
-configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000);
+configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000, options);
 }
 
 function configure(configurationFileOrObject, options) {
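Taken together, the lib/log4js.js changes make levels and appenders apply to whole category subtrees, matched on dot-separated prefixes. A hedged sketch of a configuration that relies on this (category and file names are illustrative):

```js
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: "console" },
    // now also receives sub1.sub12, sub1.sub12.sub123, ... via the prefix match
    { type: "file", filename: "sub1.log", category: "sub1" }
  ],
  levels: {
    "[all]": "INFO",
    "sub1": "DEBUG",        // applies to sub1 and every category beneath it
    "sub1.sub12": "ERROR"   // more specific entries win (keys are sorted)
  }
});

var logger = log4js.getLogger('sub1.sub12.sub123');
logger.debug('suppressed: sub1.sub12 is at ERROR');
logger.error('written to the console and to sub1.log');
```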
@@ -51,7 +51,7 @@ Logger.prototype.removeLevel = function() {
 
 Logger.prototype.log = function() {
 var args = Array.prototype.slice.call(arguments)
-, logLevel = levels.toLevel(args.shift())
+, logLevel = levels.toLevel(args.shift(), levels.INFO)
 , loggingEvent;
 if (this.isLevelEnabled(logLevel)) {
 loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
@@ -63,7 +63,7 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
 return this.level.isLessThanOrEqualTo(otherLevel);
 };
 
-['Trace','Debug','Info','Warn','Error','Fatal'].forEach(
+['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach(
 function(levelString) {
 var level = levels.toLevel(levelString);
 Logger.prototype['is'+levelString+'Enabled'] = function() {
@@ -16,7 +16,11 @@ module.exports = BaseRollingFileStream;
 function BaseRollingFileStream(filename, options) {
 debug("In BaseRollingFileStream");
 this.filename = filename;
-this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
+this.options = options || {};
+this.options.encoding = this.options.encoding || 'utf8';
+this.options.mode = this.options.mode || parseInt('0644', 8);
+this.options.flags = this.options.flags || 'a';
+
 this.currentSize = 0;
 
 function currentFileSize(file) {
@@ -3,6 +3,8 @@ var BaseRollingFileStream = require('./BaseRollingFileStream')
 , debug = require('../debug')('RollingFileStream')
 , util = require('util')
 , path = require('path')
+, child_process = require('child_process')
+, zlib = require("zlib")
 , fs = require('fs')
 , async = require('async');
 
@@ -25,7 +27,7 @@ function RollingFileStream (filename, size, backups, options) {
 util.inherits(RollingFileStream, BaseRollingFileStream);
 
 RollingFileStream.prototype.shouldRoll = function() {
-debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
+debug("should roll with current size " + this.currentSize + " and max size " + this.size);
 return this.currentSize >= this.size;
 };
 
@@ -38,6 +40,7 @@ RollingFileStream.prototype.roll = function(filename, callback) {
 }
 
 function index(filename_) {
+debug('Calculating index of '+filename_);
 return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
 }
 
@@ -51,16 +54,42 @@ RollingFileStream.prototype.roll = function(filename, callback) {
 }
 }
 
+function compress (filename, cb) {
+
+var gzip = zlib.createGzip();
+var inp = fs.createReadStream(filename);
+var out = fs.createWriteStream(filename+".gz");
+inp.pipe(gzip).pipe(out);
+fs.unlink(filename, cb);
+
+}
+
 function increaseFileIndex (fileToRename, cb) {
 var idx = index(fileToRename);
 debug('Index of ' + fileToRename + ' is ' + idx);
 if (idx < that.backups) {
+
+var ext = path.extname(fileToRename);
+var destination = filename + '.' + (idx+1);
+if (that.options.compress && /^gz$/.test(ext.substring(1))) {
+destination+=ext;
+}
 //on windows, you can get a EEXIST error if you rename a file to an existing file
 //so, we'll try to delete the file we're renaming to first
-fs.unlink(filename + '.' + (idx+1), function (err) {
+fs.unlink(destination, function (err) {
 //ignore err: if we could not delete, it's most likely that it doesn't exist
-debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
-fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
+debug('Renaming ' + fileToRename + ' -> ' + destination);
+fs.rename(path.join(path.dirname(filename), fileToRename), destination, function(err) {
+if (err) {
+cb(err);
+} else {
+if (that.options.compress && ext!=".gz") {
+compress(destination, cb);
+} else {
+cb();
+}
+}
+});
 });
 } else {
 cb();
@@ -1,6 +1,6 @@
 {
 "name": "log4js",
-"version": "0.6.21",
+"version": "0.6.25",
 "description": "Port of Log4js to work with node.",
 "keywords": [
 "logging",
@@ -31,7 +31,8 @@
 "dependencies": {
 "async": "~0.2.0",
 "readable-stream": "~1.0.2",
-"semver": "~1.1.4"
+"semver": "~4.3.3",
+"underscore": "1.8.2"
 },
 "devDependencies": {
 "vows": "0.7.0",
@@ -246,7 +246,50 @@ vows.describe('log4js connect logger').addBatch({
 'should output the response header': function(messages) {
 assert.equal(messages[0].message, 'application/cheese');
 }
-}
+},
 
+'log events with custom token' : {
+topic: function(clm) {
+var ml = new MockLogger();
+var cb = this.callback;
+ml.level = levels.INFO;
+var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url :custom_string', tokens: [{
+token: ':custom_string', replacement: 'fooBAR'
+}] } );
+request(cl, 'GET', 'http://url', 200);
+setTimeout(function() {
+cb(null, ml.messages);
+},10);
+},
+
+'check message': function(messages) {
+assert.isArray(messages);
+assert.equal(messages.length, 1);
+assert.ok(levels.INFO.isEqualTo(messages[0].level));
+assert.equal(messages[0].message, 'GET http://url fooBAR');
+}
+},
+
+'log events with custom override token' : {
+topic: function(clm) {
+var ml = new MockLogger();
+var cb = this.callback;
+ml.level = levels.INFO;
+var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url :date', tokens: [{
+token: ':date', replacement: "20150310"
+}] } );
+request(cl, 'GET', 'http://url', 200);
+setTimeout(function() {
+cb(null, ml.messages);
+},10);
+},
+
+'check message': function(messages) {
+assert.isArray(messages);
+assert.equal(messages.length, 1);
+assert.ok(levels.INFO.isEqualTo(messages[0].level));
+assert.equal(messages[0].message, 'GET http://url 20150310');
+}
+}
 }
 }).export(module);
@@ -3,11 +3,13 @@ var vows = require('vows')
 , assert = require('assert')
 , dateFormat = require('../lib/date_format');
 
+function createFixedDate() {
+return new Date(2010, 0, 11, 14, 31, 30, 5);
+}
+
 vows.describe('date_format').addBatch({
 'Date extensions': {
-topic: function() {
-return new Date(2010, 0, 11, 14, 31, 30, 5);
-},
+topic: createFixedDate,
 'should format a date as string using a pattern': function(date) {
 assert.equal(
 dateFormat.asString(dateFormat.DATETIME_FORMAT, date),
@@ -20,13 +22,16 @@ vows.describe('date_format').addBatch({
 '2010-01-11 14:31:30.005'
 );
 },
-'should provide a ISO8601 with timezone offset format': function(date) {
+'should provide a ISO8601 with timezone offset format': function() {
+var date = createFixedDate();
+date.setMinutes(date.getMinutes() - date.getTimezoneOffset() - 660);
 date.getTimezoneOffset = function() { return -660; };
 assert.equal(
 dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
 "2010-01-11T14:31:30+1100"
 );
+date = createFixedDate();
+date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
 date.getTimezoneOffset = function() { return 120; };
 assert.equal(
 dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
@@ -40,7 +45,9 @@ vows.describe('date_format').addBatch({
 '14:31:30.005'
 );
 },
-'should provide a custom format': function(date) {
+'should provide a custom format': function() {
+var date = createFixedDate();
+date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
 date.getTimezoneOffset = function() { return 120; };
 assert.equal(
 dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date),
@@ -5,6 +5,7 @@ var vows = require('vows')
 , sandbox = require('sandboxed-module')
 , log4js = require('../lib/log4js')
 , assert = require('assert')
+, zlib = require('zlib')
 , EOL = require('os').EOL || '\n';
 
 log4js.clearAppenders();
@@ -104,6 +105,70 @@ vows.describe('log4js fileAppender').addBatch({
 );
 }
 },
+'fileAppender subcategories': {
+topic: function() {
+var that = this;
+
+log4js.clearAppenders();
+
+function addAppender(cat) {
+var testFile = path.join(__dirname, '/fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log');
+remove(testFile);
+log4js.addAppender(require('../lib/appenders/file').appender(testFile), cat);
+return testFile;
+}
+
+var file_sub1 = addAppender([ 'sub1']);
+
+var file_sub1_sub12$sub1_sub13 = addAppender([ 'sub1.sub12', 'sub1.sub13' ]);
+
+var file_sub1_sub12 = addAppender([ 'sub1.sub12' ]);
+
+
+var logger_sub1_sub12_sub123 = log4js.getLogger('sub1.sub12.sub123');
+
+var logger_sub1_sub13_sub133 = log4js.getLogger('sub1.sub13.sub133');
+
+var logger_sub1_sub14 = log4js.getLogger('sub1.sub14');
+
+var logger_sub2 = log4js.getLogger('sub2');
+
+
+logger_sub1_sub12_sub123.info('sub1_sub12_sub123');
+
+logger_sub1_sub13_sub133.info('sub1_sub13_sub133');
+
+logger_sub1_sub14.info('sub1_sub14');
+
+logger_sub2.info('sub2');
+
+
+setTimeout(function() {
+that.callback(null, {
+file_sub1: fs.readFileSync(file_sub1).toString(),
+file_sub1_sub12$sub1_sub13: fs.readFileSync(file_sub1_sub12$sub1_sub13).toString(),
+file_sub1_sub12: fs.readFileSync(file_sub1_sub12).toString()
+});
+}, 3000);
+},
+'check file contents': function (err, fileContents) {
+
+// everything but category 'sub2'
+assert.match(fileContents.file_sub1, /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133|sub1.sub14 - sub1_sub14)[\s\S]){3}$/);
+assert.ok(fileContents.file_sub1.match(/sub123/) && fileContents.file_sub1.match(/sub133/) && fileContents.file_sub1.match(/sub14/));
+assert.ok(!fileContents.file_sub1.match(/sub2/));
+
+// only catgories starting with 'sub1.sub12' and 'sub1.sub13'
+assert.match(fileContents.file_sub1_sub12$sub1_sub13, /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133)[\s\S]){2}$/);
+assert.ok(fileContents.file_sub1_sub12$sub1_sub13.match(/sub123/) && fileContents.file_sub1_sub12$sub1_sub13.match(/sub133/));
+assert.ok(!fileContents.file_sub1_sub12$sub1_sub13.match(/sub14|sub2/));
+
+// only catgories starting with 'sub1.sub12'
+assert.match(fileContents.file_sub1_sub12, /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123)[\s\S]){1}$/);
+assert.ok(!fileContents.file_sub1_sub12.match(/sub14|sub2|sub13/));
+
+}
+},
 'with a max file size and no backups': {
 topic: function() {
 var testFile = path.join(__dirname, '/fa-maxFileSize-test.log')
@ -214,6 +279,79 @@ vows.describe('log4js fileAppender').addBatch({
        }
      }
    }
+   },
+   'with a max file size and 2 compressed backups': {
+     topic: function() {
+       var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-compressed-test.log')
+       , logger = log4js.getLogger('max-file-size-backups');
+       remove(testFile);
+       remove(testFile+'.1.gz');
+       remove(testFile+'.2.gz');
+
+       //log file of 50 bytes maximum, 2 backups
+       log4js.clearAppenders();
+       log4js.addAppender(
+         require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2, true),
+         'max-file-size-backups'
+       );
+       logger.info("This is the first log message.");
+       logger.info("This is the second log message.");
+       logger.info("This is the third log message.");
+       logger.info("This is the fourth log message.");
+       var that = this;
+       //give the system a chance to open the stream
+       setTimeout(function() {
+         fs.readdir(__dirname, function(err, files) {
+           if (files) {
+             that.callback(null, files.sort());
+           } else {
+             that.callback(err, files);
+           }
+         });
+       }, 1000);
+     },
+     'the log files': {
+       topic: function(files) {
+         var logFiles = files.filter(
+           function(file) { return file.indexOf('fa-maxFileSize-with-backups-compressed-test.log') > -1; }
+         );
+         return logFiles;
+       },
+       'should be 3': function (files) {
+         assert.equal(files.length, 3);
+       },
+       'should be named in sequence': function (files) {
+         assert.deepEqual(files, [
+           'fa-maxFileSize-with-backups-compressed-test.log',
+           'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
+           'fa-maxFileSize-with-backups-compressed-test.log.2.gz'
+         ]);
+       },
+       'and the contents of the first file': {
+         topic: function(logFiles) {
+           fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
+         },
+         'should be the last log message': function(contents) {
+           assert.include(contents, 'This is the fourth log message.');
+         }
+       },
+       'and the contents of the second file': {
+         topic: function(logFiles) {
+           zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[1])), this.callback);
+         },
+         'should be the third log message': function(contents) {
+           assert.include(contents.toString('utf8'), 'This is the third log message.');
+         }
+       },
+       'and the contents of the third file': {
+         topic: function(logFiles) {
+           zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[2])), this.callback);
+         },
+         'should be the second log message': function(contents) {
+           assert.include(contents.toString('utf8'), 'This is the second log message.');
+         }
+       }
+     }
+   }
  }
}).addBatch({
  'configure' : {
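The compressed-backups test above drives the file appender programmatically; the fifth argument (`true`) is what switches on gzip compression of the rolled files. The same call outside the test tree would look roughly like the sketch below — the parameter-name comment and the `log4js/lib/appenders/file` path are my reading of the test, not something this commit documents:

```js
var log4js = require('log4js');

// appender(file, layout, maxLogSize, numBackups, compress)
log4js.addAppender(
  require('log4js/lib/appenders/file').appender(
    'rolling.log',               // illustrative path
    log4js.layouts.basicLayout,
    50,                          // roll once the file passes ~50 bytes, as in the test
    2,                           // keep rolling.log.1.gz and rolling.log.2.gz
    true                         // gzip the rolled backups
  ),
  'max-file-size-backups'
);

log4js.getLogger('max-file-size-backups').info('This is the first log message.');
```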
@ -244,6 +244,7 @@ vows.describe('log4js gelfAppender').addBatch({
    },
    'should pick up the options': function(message) {
      assert.equal(message.host, 'cheese');
+     assert.isUndefined(message.GELF); // make sure flag was removed
      assert.equal(message._facility, 'nonsense');
      assert.equal(message._every1, 'Hello every one'); // the default value
      assert.equal(message._every2, 'Overwritten!'); // the overwritten value
@ -179,7 +179,7 @@ vows.describe('log4js layouts').addBatch({
    topic: function() {
      var event = {
        data: ['this is a test'],
-       startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+       startTime: new Date('2010-12-05T14:18:30.045Z'), //new Date(2010, 11, 5, 14, 18, 30, 45),
        categoryName: "multiple.levels.of.tests",
        level: {
          toString: function() { return "DEBUG"; }
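The only change in this hunk swaps the multi-argument `Date` constructor for an ISO-8601 string; my reading (the commit does not say) is that this pins the event to a fixed UTC instant instead of one that depends on the timezone of the machine running the tests:

```js
// Same digits, different semantics:
var local = new Date(2010, 11, 5, 14, 18, 30, 45);  // 5 Dec 2010 14:18:30.045 in *local* time
var utc   = new Date('2010-12-05T14:18:30.045Z');   // 5 Dec 2010 14:18:30.045 in *UTC*

// Only identical when the test machine's UTC offset is +00:00.
console.log(local.getTime() === utc.getTime());
```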
@ -282,14 +282,14 @@ vows.describe('log4js layouts').addBatch({
        test(args, '%x{testFunction}', 'testFunctionToken');
      },
      '%x{doesNotExist} should output the string stored in tokens': function(args) {
-       test(args, '%x{doesNotExist}', '%x{doesNotExist}');
+       test(args, '%x{doesNotExist}', 'null');
      },
      '%x{fnThatUsesLogEvent} should be able to use the logEvent': function(args) {
        test(args, '%x{fnThatUsesLogEvent}', 'DEBUG');
      },
      '%x should output the string stored in tokens': function(args) {
-       test(args, '%x', '%x');
+       test(args, '%x', 'null');
-     },
+     }
    },
    'layout makers': {
      topic: require('../lib/layouts'),
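These cases exercise the pattern layout's `%x{…}` token specifier; after this change a token with no entry renders as the string `null` rather than echoing `%x{doesNotExist}` back into the output. A sketch of how tokens are declared in a configuration — the console appender and the `user` token are invented for illustration, while string- and function-valued tokens mirror what the surrounding tests set up:

```js
var log4js = require('log4js');

log4js.configure({
  appenders: [{
    type: 'console',
    layout: {
      type: 'pattern',
      // %d date, %p level, %m message, %x{user} user-defined token
      pattern: '%d %p %x{user} %m',
      tokens: {
        user: function() { return process.env.USER || 'unknown'; }
      }
    }
  }]
});

// A %x{missing} in the pattern above would now print "null".
log4js.getLogger().info('hello');
```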
@ -43,6 +43,7 @@ vows.describe('levels').addBatch({
      assert.isNotNull(levels.WARN);
      assert.isNotNull(levels.ERROR);
      assert.isNotNull(levels.FATAL);
+     assert.isNotNull(levels.MARK);
      assert.isNotNull(levels.OFF);
    },
    'ALL': {

@ -57,6 +58,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -70,6 +72,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -84,6 +87,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -99,6 +103,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -113,6 +118,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -127,6 +133,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -141,6 +148,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -154,6 +162,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -168,6 +177,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]
      );

@ -180,6 +190,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]);
      assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);

@ -190,6 +201,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]);
    },

@ -202,6 +214,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]);
    }

@ -209,7 +222,7 @@ vows.describe('levels').addBatch({
    'WARN': {
      topic: levels.WARN,
      'should be less than ERROR': function(warn) {
-       assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
+       assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.MARK, levels.OFF]);
        assertThat(warn).isNotLessThanOrEqualTo([
          levels.ALL,
          levels.TRACE,

@ -224,7 +237,7 @@ vows.describe('levels').addBatch({
          levels.DEBUG,
          levels.INFO
        ]);
-       assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
+       assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.MARK, levels.OFF]);
      },
      'should only be equal to WARN': function(trace) {
        assertThat(trace).isEqualTo([levels.toLevel("WARN")]);

@ -242,7 +255,7 @@ vows.describe('levels').addBatch({
    'ERROR': {
      topic: levels.ERROR,
      'should be less than FATAL': function(error) {
-       assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]);
+       assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
        assertThat(error).isNotLessThanOrEqualTo([
          levels.ALL,
          levels.TRACE,

@ -259,7 +272,7 @@ vows.describe('levels').addBatch({
          levels.INFO,
          levels.WARN
        ]);
-       assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]);
+       assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
      },
      'should only be equal to ERROR': function(trace) {
        assertThat(trace).isEqualTo([levels.toLevel("ERROR")]);

@ -270,6 +283,7 @@ vows.describe('levels').addBatch({
          levels.INFO,
          levels.WARN,
          levels.FATAL,
+         levels.MARK,
          levels.OFF
        ]);
      }

@ -277,7 +291,7 @@ vows.describe('levels').addBatch({
    'FATAL': {
      topic: levels.FATAL,
      'should be less than OFF': function(fatal) {
-       assertThat(fatal).isLessThanOrEqualTo([levels.OFF]);
+       assertThat(fatal).isLessThanOrEqualTo([levels.MARK, levels.OFF]);
        assertThat(fatal).isNotLessThanOrEqualTo([
          levels.ALL,
          levels.TRACE,

@ -296,7 +310,7 @@ vows.describe('levels').addBatch({
          levels.WARN,
          levels.ERROR
        ]);
-       assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]);
+       assertThat(fatal).isNotGreaterThanOrEqualTo([levels.MARK, levels.OFF]);
      },
      'should only be equal to FATAL': function(fatal) {
        assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);

@ -307,6 +321,47 @@ vows.describe('levels').addBatch({
          levels.INFO,
          levels.WARN,
          levels.ERROR,
+         levels.MARK,
+         levels.OFF
+       ]);
+     }
+   },
+   'MARK': {
+     topic: levels.MARK,
+     'should be less than OFF': function(mark) {
+       assertThat(mark).isLessThanOrEqualTo([levels.OFF]);
+       assertThat(mark).isNotLessThanOrEqualTo([
+         levels.ALL,
+         levels.TRACE,
+         levels.DEBUG,
+         levels.INFO,
+         levels.WARN,
+         levels.FATAL,
+         levels.ERROR
+       ]);
+     },
+     'should be greater than FATAL': function(mark) {
+       assertThat(mark).isGreaterThanOrEqualTo([
+         levels.ALL,
+         levels.TRACE,
+         levels.DEBUG,
+         levels.INFO,
+         levels.WARN,
+         levels.ERROR,
+         levels.FATAL
+       ]);
+       assertThat(mark).isNotGreaterThanOrEqualTo([levels.OFF]);
+     },
+     'should only be equal to MARK': function(mark) {
+       assertThat(mark).isEqualTo([levels.toLevel("MARK")]);
+       assertThat(mark).isNotEqualTo([
+         levels.ALL,
+         levels.TRACE,
+         levels.DEBUG,
+         levels.INFO,
+         levels.WARN,
+         levels.ERROR,
+         levels.FATAL,
          levels.OFF
        ]);
      }

@ -321,7 +376,8 @@ vows.describe('levels').addBatch({
          levels.INFO,
          levels.WARN,
          levels.ERROR,
-         levels.FATAL
+         levels.FATAL,
+         levels.MARK
        ]);
      },
      'should be greater than everything': function(off) {

@ -332,7 +388,8 @@ vows.describe('levels').addBatch({
          levels.INFO,
          levels.WARN,
          levels.ERROR,
-         levels.FATAL
+         levels.FATAL,
+         levels.MARK
        ]);
      },
      'should only be equal to OFF': function(off) {

@ -344,7 +401,8 @@ vows.describe('levels').addBatch({
          levels.INFO,
          levels.WARN,
          levels.ERROR,
-         levels.FATAL
+         levels.FATAL,
+         levels.MARK
        ]);
      }
    }

@ -353,14 +411,14 @@ vows.describe('levels').addBatch({
      topic: levels.INFO,
      'should handle string arguments': function(info) {
        assertThat(info).isGreaterThanOrEqualTo(["all", "trace", "debug"]);
-       assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
+       assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
      }
    },
    'isLessThanOrEqualTo': {
      topic: levels.INFO,
      'should handle string arguments': function(info) {
        assertThat(info).isNotLessThanOrEqualTo(["all", "trace", "debug"]);
-       assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
+       assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
      }
    },
    'isEqualTo': {
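Taken together, the edits above slot the new `MARK` level in between `FATAL` and `OFF`. A quick sketch of what that ordering means in practice — `log4js/lib/levels` stands in for the test's relative `../lib/levels`, and since the diff only shows level comparisons, the sketch sticks to those plus `setLevel`:

```js
var log4js = require('log4js');
var levels = require('log4js/lib/levels');

// MARK is above FATAL and below OFF.
console.log(levels.MARK.isGreaterThanOrEqualTo(levels.FATAL)); // true
console.log(levels.MARK.isLessThanOrEqualTo(levels.OFF));      // true
console.log(levels.MARK.isEqualTo(levels.toLevel('MARK')));    // true

// So a logger set to MARK filters out everything up to and including fatal().
var logger = log4js.getLogger('quiet');
logger.setLevel('MARK');
logger.fatal('this should be suppressed');
```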
@ -74,7 +74,6 @@ vows.describe('log4js smtpAppender').addBatch({
    topic: function() {
      var setup = setupLogging('minimal config', {
        recipients: 'recipient@domain.com',
-       transport: "SMTP",
        SMTP: {
          port: 25,
          auth: {

@ -98,7 +97,6 @@ vows.describe('log4js smtpAppender').addBatch({
        recipients: 'recipient@domain.com',
        sender: 'sender@domain.com',
        subject: 'This is subject',
-       transport: "SMTP",
        SMTP: {
          port: 25,
          auth: {

@ -134,7 +132,6 @@ vows.describe('log4js smtpAppender').addBatch({
      var self = this;
      var setup = setupLogging('separate email for each event', {
        recipients: 'recipient@domain.com',
-       transport: "SMTP",
        SMTP: {
          port: 25,
          auth: {

@ -168,7 +165,6 @@ vows.describe('log4js smtpAppender').addBatch({
      var setup = setupLogging('multiple events in one email', {
        recipients: 'recipient@domain.com',
        sendInterval: 1,
-       transport: "SMTP",
        SMTP: {
          port: 25,
          auth: {

@ -206,7 +202,6 @@ vows.describe('log4js smtpAppender').addBatch({
      var setup = setupLogging('error when sending email', {
        recipients: 'recipient@domain.com',
        sendInterval: 0,
-       transport: 'SMTP',
        SMTP: { port: 25, auth: { user: 'user@domain.com' } }
      });
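Every smtpAppender fixture above drops its explicit `transport: "SMTP"` entry and keeps only the nested `SMTP` transport settings. A minimal config in that updated shape, with the values copied from the 'error when sending email' case (reading the dropped field as redundant is my interpretation; the commit itself only touches the tests):

```js
var log4js = require('log4js');

log4js.configure({
  appenders: [{
    type: 'smtp',
    recipients: 'recipient@domain.com',
    sendInterval: 0,   // 0 sends a separate email per log event
    SMTP: { port: 25, auth: { user: 'user@domain.com' } }
  }]
});

log4js.getLogger('mailer').error('Something went wrong');
```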
86  test/subcategories-test.js  Normal file

@ -0,0 +1,86 @@
+ "use strict";
+ var assert = require('assert')
+ , vows = require('vows')
+ , sandbox = require('sandboxed-module')
+ , log4js = require('../lib/log4js')
+ , levels = require('../lib/levels');
+
+ vows.describe('subcategories').addBatch({
+   'loggers created after levels configuration is loaded': {
+     topic: function() {
+
+       log4js.configure({
+         "levels": {
+           "sub1": "WARN",
+           "sub1.sub11": "TRACE",
+           "sub1.sub11.sub111": "WARN",
+           "sub1.sub12": "INFO"
+         }
+       }, { reloadSecs: 30 })
+
+       return {
+         "sub1": log4js.getLogger('sub1'), // WARN
+         "sub11": log4js.getLogger('sub1.sub11'), // TRACE
+         "sub111": log4js.getLogger('sub1.sub11.sub111'), // WARN
+         "sub12": log4js.getLogger('sub1.sub12'), // INFO
+
+         "sub13": log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
+         "sub112": log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
+         "sub121": log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
+         "sub0": log4js.getLogger('sub0') // Not defined, not inherited: TRACE
+       };
+     },
+     'check logger levels': function(loggers) {
+       assert.equal(loggers.sub1.level, levels.WARN);
+       assert.equal(loggers.sub11.level, levels.TRACE);
+       assert.equal(loggers.sub111.level, levels.WARN);
+       assert.equal(loggers.sub12.level, levels.INFO);
+
+       assert.equal(loggers.sub13.level, levels.WARN);
+       assert.equal(loggers.sub112.level, levels.TRACE);
+       assert.equal(loggers.sub121.level, levels.INFO);
+       assert.equal(loggers.sub0.level, levels.TRACE);
+     }
+   },
+   'loggers created before levels configuration is loaded': {
+     topic: function() {
+
+       var loggers = {
+         "sub1": log4js.getLogger('sub1'), // WARN
+         "sub11": log4js.getLogger('sub1.sub11'), // TRACE
+         "sub111": log4js.getLogger('sub1.sub11.sub111'), // WARN
+         "sub12": log4js.getLogger('sub1.sub12'), // INFO
+
+         "sub13": log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
+         "sub112": log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
+         "sub121": log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
+         "sub0": log4js.getLogger('sub0') // Not defined, not inherited: TRACE
+       };
+
+       log4js.configure({
+         "levels": {
+           "sub1": "WARN",
+           "sub1.sub11": "TRACE",
+           "sub1.sub11.sub111": "WARN",
+           "sub1.sub12": "INFO"
+         }
+       }, { reloadSecs: 30 })
+
+       return loggers;
+
+     },
+     'check logger levels': function(loggers) {
+       assert.equal(loggers.sub1.level, levels.WARN);
+       assert.equal(loggers.sub11.level, levels.TRACE);
+       assert.equal(loggers.sub111.level, levels.WARN);
+       assert.equal(loggers.sub12.level, levels.INFO);
+
+       assert.equal(loggers.sub13.level, levels.WARN);
+       assert.equal(loggers.sub112.level, levels.TRACE);
+       assert.equal(loggers.sub121.level, levels.INFO);
+       assert.equal(loggers.sub0.level, levels.TRACE);
+     }
+   }
+ }).exportTo(module);
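The new test spells out how per-category levels cascade: a child category without its own entry inherits the nearest configured ancestor, and anything outside the configured tree keeps the default level (TRACE in these tests). Restated as a tiny standalone sketch, with `log4js/lib/levels` standing in for the test's relative `../lib/levels`:

```js
var log4js = require('log4js');
var levels = require('log4js/lib/levels');

log4js.configure({
  levels: {
    "sub1": "WARN",
    "sub1.sub11": "TRACE"
  }
});

console.log(log4js.getLogger('sub1.sub13').level === levels.WARN);         // true: inherits "sub1"
console.log(log4js.getLogger('sub1.sub11.sub112').level === levels.TRACE); // true: inherits "sub1.sub11"
console.log(log4js.getLogger('sub0').level === levels.TRACE);              // true: not configured, default
```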