working fileappender, with tests, broken everything else

parent 80474c6881
commit 545681287f

20  lib/appenders/console.js  Normal file
@@ -0,0 +1,20 @@
var layouts = require('../layouts');

function consoleAppender (layout) {
  layout = layout || layouts.colouredLayout;
  return function(loggingEvent) {
    console._preLog4js_log(layout(loggingEvent));
  };
}

function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return consoleAppender(layout);
}

exports.name = "console";
exports.appender = consoleAppender;
exports.configure = configure;
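For context, a small usage sketch (illustrative only, not part of the commit): the configure() entry point above builds a console appender from a config object, falling back to colouredLayout when no layout block is supplied. The 'basic' layout type is assumed to be one of the types layouts.layout() recognises.

// illustrative sketch, assuming this module is loaded from lib/appenders/console.js
var consoleAppenderModule = require('./lib/appenders/console');

// no layout block: consoleAppender() falls back to layouts.colouredLayout
var colouredConsole = consoleAppenderModule.configure({});

// with a layout block: layouts.layout(type, config) resolves the layout
var basicConsole = consoleAppenderModule.configure({ layout: { type: 'basic' } });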
124  lib/appenders/file.js  Normal file
@@ -0,0 +1,124 @@
var layouts = require('../layouts')
, fs = require('fs');

/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file file log messages will be written to
 * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize has been reached (default 5)
 * @param filePollInterval - the time in seconds between file size checks (default 30s)
 */
function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
  layout = layout || layouts.basicLayout;
  numBackups = numBackups === undefined ? 5 : numBackups;
  filePollInterval = filePollInterval * 1000 || 30000;

  function setupLogRolling () {
    fs.watchFile(
      file,
      {
        persistent: false,
        interval: filePollInterval
      },
      function (curr, prev) {
        if (curr.size >= logSize) {
          rollThatLog();
        }
      }
    );
  }

  function rollThatLog () {
    if (numBackups > 0) {
      //roll the backups (rename file.n-1 to file.n, where n <= numBackups)
      for (var i=numBackups; i > 0; i--) {
        if (i > 1) {
          if (fileExists(file + '.' + (i-1))) {
            fs.renameSync(file+'.'+(i-1), file+'.'+i);
          }
        } else {
          fs.renameSync(file, file+'.1');
        }
      }
      //let's make a new file
      var newLogFileFD = fs.openSync(file, 'a', 0644)
      , oldLogFileFD = logFile.fd;
      logFile.fd = newLogFileFD;
      fs.close(oldLogFileFD);
    } else {
      fs.truncate(logFile.fd, logSize);
    }
  }

  function fileExists (filename) {
    try {
      fs.statSync(filename);
      return true;
    } catch (e) {
      return false;
    }
  }

  function openTheStream() {
    var stream = fs.createWriteStream(file, { flags: 'a', mode: 0644, encoding: 'utf8' });
    stream.on("open", function() {
      canWrite = true;
      while (logEventBuffer.length > 0 && canWrite) {
        canWrite = writeToLog(logEventBuffer.shift());
      }
    });
    stream.on("error", function (err) {
      console.error("log4js.fileAppender - Error happened ", err);
    });
    stream.on("drain", function() {
      canWrite = true;
      while (logEventBuffer.length > 0 && canWrite) {
        canWrite = writeToLog(logEventBuffer.shift());
      }
    });
    return stream;
  }


  var logEventBuffer = []
  , canWrite = false
  , logFile = openTheStream();

  if (logSize > 0) {
    setupLogRolling();
  }

  //close the file on process exit.
  process.on('exit', function() {
    logFile.end();
    logFile.destroy();
  });

  function writeToLog(loggingEvent) {
    return logFile.write(layout(loggingEvent)+'\n', "utf8");
  }

  return function(loggingEvent) {
    //because the log stream is opened asynchronously, we don't want to write
    //until it is ready.
    if (canWrite) {
      canWrite = writeToLog(loggingEvent);
    } else {
      logEventBuffer.push(loggingEvent);
    }
  };
}

function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.pollInterval);
}

exports.name = "file";
exports.appender = fileAppender;
exports.configure = configure;
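A hedged sketch of how configure() above maps a config object onto the fileAppender parameters. The key names filename, maxLogSize, backups and pollInterval are read directly from the config by the code above; the file name and values below are made up for illustration.

// illustrative sketch, assuming this module is loaded from lib/appenders/file.js
var fileAppenderModule = require('./lib/appenders/file');

var appender = fileAppenderModule.configure({
  filename: 'application.log',   // passed to fileAppender as `file`
  maxLogSize: 10 * 1024 * 1024,  // bytes; rolling is only set up when this is > 0
  backups: 3,                    // rolled files to keep (numBackups)
  pollInterval: 60               // seconds between fs.watchFile size checks
});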
19  lib/appenders/logLevelFilter.js  Normal file
@@ -0,0 +1,19 @@
var levels = require('../levels');

function logLevelFilter (levelString, appender) {
  var level = levels.toLevel(levelString);
  return function(logEvent) {
    if (logEvent.level.isGreaterThanOrEqualTo(level)) {
      appender(logEvent);
    }
  }
}

function configure(config) {
  var appender = config.makers[config.appender.type](config.appender);
  return logLevelFilter(config.level, appender);
}

exports.name = "logLevelFilter";
exports.appender = logLevelFilter;
exports.configure = configure;
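A sketch of the wiring configure() above relies on: log4js.configure() injects the appenderMakers registry as config.makers, and the filter looks up the wrapped appender's maker by config.appender.type. The makers object below is stubbed by hand purely for illustration.

// illustrative sketch, assuming lib/appenders/logLevelFilter.js and lib/appenders/console.js
var logLevelFilterModule = require('./lib/appenders/logLevelFilter');

var warnOnlyConsole = logLevelFilterModule.configure({
  level: 'WARN',
  appender: { type: 'console' },
  makers: { console: require('./lib/appenders/console').configure }
});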
186  lib/log4js.js
@@ -50,30 +50,12 @@ var events = require('events')
, sys = require('sys')
, layouts = require('./layouts')
, levels = require('./levels')
, consoleAppender = require('./appenders/console').appender
, DEFAULT_CATEGORY = '[default]'
, ALL_CATEGORIES = '[all]'
, appenders = {}
, loggers = {}
, appenderMakers = {
    "file": function(config, fileAppender) {
      var layout;
      if (config.layout) {
        layout = layouts.layout(config.layout.type, config.layout);
      }
      return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.pollInterval);
    },
    "console": function(config, fileAppender, consoleAppender) {
      var layout;
      if (config.layout) {
        layout = layouts.layout(config.layout.type, config.layout);
      }
      return consoleAppender(layout);
    },
    "logLevelFilter": function(config, fileAppender, consoleAppender) {
      var appender = appenderMakers[config.appender.type](config.appender, fileAppender, consoleAppender);
      return logLevelFilter(config.level, appender);
    }
  };
, appenderMakers = {};

/**
 * Get a logger instance. Instance is cached on categoryName level.
@@ -150,11 +132,11 @@ function clearAppenders () {
  }
}

function configureAppenders(appenderList, fileAppender, consoleAppender) {
function configureAppenders(appenderList) {
  clearAppenders();
  if (appenderList) {
    appenderList.forEach(function(appenderConfig) {
      var appender = appenderMakers[appenderConfig.type](appenderConfig, fileAppender, consoleAppender);
      var appender = appenderMakers[appenderConfig.type](appenderConfig);
      if (appender) {
        addAppender(appender, appenderConfig.category);
      } else {
@@ -258,136 +240,6 @@ function getDefaultLogger () {
  return getLogger(DEFAULT_CATEGORY);
}

function logLevelFilter (levelString, appender) {
  var level = levels.toLevel(levelString);
  return function(logEvent) {
    if (logEvent.level.isGreaterThanOrEqualTo(level)) {
      appender(logEvent);
    }
  }
}


function consoleAppender (layout) {
  layout = layout || layouts.colouredLayout;
  return function(loggingEvent) {
    console._preLog4js_log(layout(loggingEvent));
  };
}

/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file file log messages will be written to
 * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize has been reached (default 5)
 * @param filePollInterval - the time in seconds between file size checks (default 30s)
 */
function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
  layout = layout || layouts.basicLayout;
  numBackups = numBackups || 5;
  filePollInterval = filePollInterval * 1000 || 30000;

  function setupLogRolling () {
    fs.watchFile(
      file,
      {
        persistent: false,
        interval: filePollInterval
      },
      function (curr, prev) {
        if (curr.size >= logSize) {
          rollThatLog();
        }
      }
    );
  }

  function rollThatLog () {
    //roll the backups (rename file.n-1 to file.n, where n <= numBackups)
    for (var i=numBackups; i > 0; i--) {
      if (i > 1) {
        if (fileExists(file + '.' + (i-1))) {
          console.debug("Renaming " + file + '.' + (i-1) + ' -> ' + file + '.' + i);
          fs.renameSync(file+'.'+(i-1), file+'.'+i);
        }
      } else {
        console.debug("Renaming " + file + " -> " + file + ".1");
        fs.renameSync(file, file+'.1');
      }
    }
    //let's make a new file
    var newLogFileFD = fs.openSync(file, 'a', 0644)
    , oldLogFileFD = logFile.fd;
    console.debug("made a new logFile ", newLogFileFD);
    logFile.fd = newLogFileFD;
    console.debug("closing old log file fd ", oldLogFileFD);
    fs.close(oldLogFileFD);
  }

  function fileExists (filename) {
    try {
      fs.statSync(filename);
      return true;
    } catch (e) {
      return false;
    }
  }

  function openTheStream() {
    console.debug("Creating the write Stream with file ", file);
    var stream = fs.createWriteStream(file, { flags: 'a', mode: 0644, encoding: 'utf8' });
    stream.on("open", function() {
      canWrite = true;
      console.debug("Stream is open, writing %d buffered events", logEventBuffer.length);
      while (logEventBuffer.length > 0 && canWrite) {
        canWrite = writeToLog(logEventBuffer.shift());
      }
    });
    stream.on("error", function (err) {
      console.error("Error happened ", err);
    });
    stream.on("drain", function() {
      canWrite = true;
      while (logEventBuffer.length > 0 && canWrite) {
        canWrite = writeToLog(logEventBuffer.shift());
      }
    });
    return stream;
  }


  var logEventBuffer = []
  , canWrite = false
  , logFile = openTheStream();

  if (logSize > 0) {
    setupLogRolling();
  }

  //close the file on process exit.
  process.on('exit', function() {
    console.debug("Buffer contains %d events", logEventBuffer.length);
    logFile.end();
    logFile.destroy();
  });

  function writeToLog(loggingEvent) {
    return logFile.write(layout(loggingEvent)+'\n', "utf8");
  }

  return function(loggingEvent) {
    //because the log stream is opened asynchronously, we don't want to write
    //until it is ready.
    if (canWrite) {
      canWrite = writeToLog(loggingEvent);
    } else {
      logEventBuffer.push(loggingEvent);
    }
  };
}

function configure (configurationFileOrObject) {
  var config = configurationFileOrObject;
  if (typeof(config) === "string") {
@@ -395,7 +247,8 @@ function configure (configurationFileOrObject) {
  }
  if (config) {
    try {
      configureAppenders(config.appenders, fileAppender, consoleAppender);
      config.makers = appenderMakers;
      configureAppenders(config.appenders);
      configureLevels(config.levels);
    } catch (e) {
      throw new Error("Problem reading log4js config " + sys.inspect(config) + ". Error was \"" + e.message + "\" ("+e.stack+")");
@@ -436,10 +289,14 @@ function replaceConsole(logger) {

}

//set ourselves up if we can find a default log4js.json
configure(findConfiguration());
//replace console.log, etc with log4js versions
replaceConsole(getLogger("console"));
function loadAppenders() {
  var appenderList = fs.readdirSync(__dirname + '/appenders');
  appenderList.forEach(function(file) {
    var appenderModule = require('./appenders/' + file);
    module.exports.appenders[appenderModule.name] = appenderModule.appender;
    appenderMakers[appenderModule.name] = appenderModule.configure;
  });
}

module.exports = {
  getLogger: getLogger,
@@ -452,16 +309,23 @@ module.exports = {
  levels: levels,
  setGlobalLogLevel: setGlobalLogLevel,

  consoleAppender: consoleAppender,
  fileAppender: fileAppender,
  logLevelFilter: logLevelFilter,

  layouts: layouts,
  appenders: {},
  connectLogger: require('./connect-logger').connectLogger
};

loadAppenders();
//set ourselves up if we can find a default log4js.json
configure(findConfiguration());
//replace console.log, etc with log4js versions
replaceConsole(getLogger("console"));

//keep the old-style layouts
['basicLayout','messagePassThroughLayout','colouredLayout','coloredLayout'].forEach(function(item) {
  module.exports[item] = layouts[item];
});

//and the old-style appenders
module.exports.consoleAppender = module.exports.appenders.console;
module.exports.fileAppender = module.exports.appenders.file;
module.exports.logLevelFilter = module.exports.appenders.logLevelFilter;
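The loadAppenders() function added above defines, in effect, a plugin contract: every module in lib/appenders must export a name, an appender factory, and a configure(config) function, and it then becomes reachable both programmatically (module.exports.appenders[name]) and from JSON config (appenderMakers[name]). A minimal hypothetical module satisfying that contract might look like the following; the "noop" appender does not exist in the repository and is only a sketch of the expected shape.

// hypothetical lib/appenders/noop.js, illustrating the loadAppenders() contract
var layouts = require('../layouts');

function noopAppender (layout) {
  layout = layout || layouts.basicLayout;
  return function (loggingEvent) {
    // a real appender would send layout(loggingEvent) somewhere; this one discards it
  };
}

function configure (config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return noopAppender(layout);
}

exports.name = "noop";
exports.appender = noopAppender;
exports.configure = configure;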
126  test/fileAppender.js  Normal file
@@ -0,0 +1,126 @@
var vows = require('vows')
, fs = require('fs')
, log4js = require('../lib/log4js')
, assert = require('assert');

log4js.clearAppenders();

function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    //doesn't really matter if it failed
  }
}

vows.describe('log4js fileAppender').addBatch({
  'with default fileAppender settings': {
    topic: function() {
      var testFile = __dirname + '/fa-default-test.log'
      , logger = log4js.getLogger('default-settings');
      remove(testFile);
      log4js.addAppender(log4js.fileAppender(testFile), 'default-settings');

      logger.info("This should be in the file.");

      fs.readFile(testFile, "utf8", this.callback);
    },
    'should write log messages to the file': function(err, fileContents) {
      assert.include(fileContents, "This should be in the file.\n");
    },
    'log messages should be in the basic layout format': function(err, fileContents) {
      assert.match(fileContents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /);
    }
  },
  'with a max file size and no backups': {
    topic: function() {
      var testFile = __dirname + '/fa-maxFileSize-test.log'
      , logger = log4js.getLogger('max-file-size');
      remove(testFile);
      //log file of 50 bytes maximum, no backups, check every 10ms for changes
      log4js.addAppender(log4js.fileAppender(testFile, log4js.layouts.basicLayout, 50, 0, 0.01), 'max-file-size');
      logger.info("This is the first log message.");
      //we have to wait before writing the second one, because node is too fast for the file system.
      var that = this;
      setTimeout(function() {
        logger.info("This is the second log message.");
        fs.readFile(testFile, "utf8", that.callback);
      }, 500);
    },
    'log file should only contain the second message': function(err, fileContents) {
      assert.include(fileContents, "This is the second log message.\n");
      assert.equal(fileContents.indexOf("This is the first log message."), -1);
    },
    'the number of files': {
      topic: function() {
        fs.readdir(__dirname, this.callback);
      },
      'starting with the test file name should be one': function(err, files) {
        var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; });
        assert.length(logFiles, 1);
      }
    }
  },
  'with a max file size and 2 backups': {
    topic: function() {
      var testFile = __dirname + '/fa-maxFileSize-with-backups-test.log'
      , logger = log4js.getLogger('max-file-size-backups');
      remove(testFile);
      remove(testFile+'.1');
      remove(testFile+'.2');

      //log file of 50 bytes maximum, 2 backups, check every 10ms for changes
      log4js.addAppender(log4js.fileAppender(testFile, log4js.layouts.basicLayout, 50, 2, 0.01), 'max-file-size-backups');
      logger.info("This is the first log message.");
      //we have to wait before writing the second one, because node is too fast for the file system.
      var that = this;
      setTimeout(function() {
        logger.info("This is the second log message.");
      }, 200);
      setTimeout(function() {
        logger.info("This is the third log message.");
      }, 400);
      setTimeout(function() {
        logger.info("This is the fourth log message.");
        fs.readdir(__dirname, that.callback);
      }, 600);
    },
    'the log files': {
      topic: function(files) {
        var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; });
        return logFiles;
      },
      'should be 3': function (files) {
        assert.length(files, 3);
      },
      'should be named in sequence': function (files) {
        assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
      },
      'and the contents of the first file': {
        topic: function(logFiles) {
          fs.readFile(logFiles[0], "utf8", this.callback);
        },
        'should be the last log message': function(err, contents) {
          assert.include(contents, 'This is the fourth log message.');
        }
      },
      'and the contents of the second file': {
        topic: function(logFiles) {
          fs.readFile(logFiles[1], "utf8", this.callback);
        },
        'should be the third log message': function(err, contents) {
          assert.include(contents, 'This is the third log message.');
        }
      },
      'and the contents of the third file': {
        topic: function(logFiles) {
          fs.readFile(logFiles[2], "utf8", this.callback);
        },
        'should be the second log message': function(err, contents) {
          assert.include(contents, 'This is the second log message.');
        }
      }
    }
  }

}).export(module);
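The tests above wire the file appender up programmatically via log4js.addAppender(). For comparison, a hedged sketch of the equivalent JSON-config route that configure() and the appenderMakers registry now support; the file name and values below are invented for illustration.

// illustrative sketch: the same file appender, driven by a config object
var log4js = require('./lib/log4js');

log4js.configure({
  appenders: [
    {
      type: "file",                      // resolved via appenderMakers["file"]
      filename: "config-route-test.log",
      maxLogSize: 50,
      backups: 2,
      pollInterval: 0.01,
      category: "config-route"           // read by configureAppenders()
    }
  ]
});

log4js.getLogger("config-route").info("written via the JSON config route");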
153  test/logging.js
@@ -55,153 +55,6 @@ vows.describe('log4js').addBatch({

  },

  'fileAppender': {
    topic: function() {
      var appender
      , logmessages = []
      , thing = "thing"
      , fakeFS = {
        createWriteStream: function() {
          assert.equal(arguments[0], './tmp-tests.log');
          assert.isObject(arguments[1]);
          assert.equal(arguments[1].flags, 'a');
          assert.equal(arguments[1].mode, 0644);
          assert.equal(arguments[1].encoding, 'utf8');
          return {
            write: function(message) {
              logmessages.push(message);
            }
            , end: function() {}
            , destroySoon: function() {}
          };
        },
        watchFile: function() {
          throw new Error("watchFile should not be called if logSize is not defined");
        }
      },
      log4js = sandbox.require(
        '../lib/log4js',
        {
          requires: {
            'fs': fakeFS
          }
        }
      );
      log4js.clearAppenders();

      appender = log4js.fileAppender('./tmp-tests.log', log4js.layouts.messagePassThroughLayout);
      log4js.addAppender(appender, 'file-test');

      var logger = log4js.getLogger('file-test');
      logger.debug("this is a test");

      return logmessages;
    },
    'should write log messages to file': function(logmessages) {
      assert.length(logmessages, 1);
      assert.equal(logmessages, "this is a test\n");
    }
  },

  'fileAppender - with rolling based on size and number of files to keep': {
    topic: function() {
      var watchCb,
        filesOpened = [],
        filesEnded = [],
        filesRenamed = [],
        newFilenames = [],
        existingFiles = ['tests.log'],
        log4js = sandbox.require(
          '../lib/log4js'
        , {
            requires: {
              'fs': {
                watchFile: function(file, options, callback) {
                  assert.equal(file, 'tests.log');
                  assert.equal(options.persistent, false);
                  assert.equal(options.interval, 30000);
                  assert.isFunction(callback);
                  watchCb = callback;
                },
                createWriteStream: function(file) {
                  assert.equal(file, 'tests.log');
                  filesOpened.push(file);
                  return {
                    end: function() {
                      filesEnded.push(file);
                    }
                  };
                },
                statSync: function(file) {
                  if (existingFiles.indexOf(file) < 0) {
                    throw new Error("this file doesn't exist");
                  } else {
                    return true;
                  }
                },
                renameSync: function(oldFile, newFile) {
                  filesRenamed.push(oldFile);
                  existingFiles.push(newFile);
                }
              }
            }
          }
        );
      var appender = log4js.fileAppender('tests.log', log4js.messagePassThroughLayout, 1024, 2, 30);
      return [watchCb, filesOpened, filesEnded, filesRenamed, existingFiles];
    },

    'should close current log file, rename all old ones, open new one on rollover': function(args) {
      var watchCb = args[0]
      , filesOpened = args[1]
      , filesEnded = args[2]
      , filesRenamed = args[3]
      , existingFiles = args[4];
      assert.isFunction(watchCb);
      //tell the watchCb that the file is below the threshold
      watchCb({ size: 891 }, { size: 0 });
      //filesOpened should still be the first one.
      assert.length(filesOpened, 1);
      //tell the watchCb that the file is now over the threshold
      watchCb({ size: 1053 }, { size: 891 });
      //it should have closed the first log file.
      assert.length(filesEnded, 1);
      //it should have renamed the previous log file
      assert.length(filesRenamed, 1);
      //and we should have two files now
      assert.length(existingFiles, 2);
      assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1']);
      //and opened a new log file.
      assert.length(filesOpened, 2);

      //now tell the watchCb that we've flipped over the threshold again
      watchCb({ size: 1025 }, { size: 123 });
      //it should have closed the old file
      assert.length(filesEnded, 2);
      //it should have renamed both the old log file, and the previous '.1' file
      assert.length(filesRenamed, 3);
      assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log' ]);
      //it should have renamed 2 more file
      assert.length(existingFiles, 4);
      assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
      //and opened a new log file
      assert.length(filesOpened, 3);

      //tell the watchCb we've flipped again.
      watchCb({ size: 1024 }, { size: 234 });
      //close the old one again.
      assert.length(filesEnded, 3);
      //it should have renamed the old log file and the 2 backups, with the last one being overwritten.
      assert.length(filesRenamed, 5);
      assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log', 'tests.log.1', 'tests.log' ]);
      //it should have renamed 2 more files
      assert.length(existingFiles, 6);
      assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
      //and opened a new log file
      assert.length(filesOpened, 4);
    }
  },

  'configure' : {
    topic: function() {
      var messages = {}, fakeFS = {
@@ -217,6 +70,9 @@ vows.describe('log4js').addBatch({
            , destroySoon: function() {}
          };
        },
        readdirSync: function(dir) {
          return require('fs').readdirSync(dir);
        },
        readFileSync: function(file, encoding) {
          return require('fs').readFileSync(file, encoding);
        },
@@ -407,6 +263,9 @@ vows.describe('log4js').addBatch({
        logger,
        modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
        fakeFS = {
          readdirSync: function(dir) {
            return require('fs').readdirSync(dir);
          },
          readFileSync: function (file, encoding) {
            assert.equal(file, modulePath);
            assert.equal(encoding, 'utf8');