Merged changes

Daniel Bell 2011-10-05 12:27:33 +11:00
commit ff68e46858
5 changed files with 41 additions and 34 deletions

View File

@@ -13,6 +13,8 @@ var layouts = require('../layouts')
 function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
   layout = layout || layouts.basicLayout;
   numBackups = numBackups === undefined ? 5 : numBackups;
+  //there has to be at least one backup if logSize has been specified
+  numBackups = numBackups === 0 ? 1 : numBackups;
   filePollInterval = filePollInterval * 1000 || 30000;
   function setupLogRolling () {
@@ -31,25 +33,21 @@ function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
   }
   function rollThatLog () {
-    if (numBackups > 0) {
-      //roll the backups (rename file.n-1 to file.n, where n <= numBackups)
-      for (var i=numBackups; i > 0; i--) {
-        if (i > 1) {
-          if (fileExists(file + '.' + (i-1))) {
-            fs.renameSync(file+'.'+(i-1), file+'.'+i);
-          }
-        } else {
-          fs.renameSync(file, file+'.1');
+    //roll the backups (rename file.n-1 to file.n, where n <= numBackups)
+    for (var i=numBackups; i > 0; i--) {
+      if (i > 1) {
+        if (fileExists(file + '.' + (i-1))) {
+          fs.renameSync(file+'.'+(i-1), file+'.'+i);
         }
+      } else {
+        fs.renameSync(file, file+'.1');
       }
-      //let's make a new file
-      var newLogFileFD = fs.openSync(file, 'a', 0644)
-      , oldLogFileFD = logFile.fd;
-      logFile.fd = newLogFileFD;
-      fs.close(oldLogFileFD);
-    } else {
-      fs.truncate(logFile.fd, logSize);
     }
+    //let's make a new file
+    var newLogFileFD = fs.openSync(file, 'a', 0644)
+    , oldLogFileFD = logFile.fd;
+    logFile.fd = newLogFileFD;
+    fs.close(oldLogFileFD);
   }
   function fileExists (filename) {
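Taken together, the two hunks above change the contract of fileAppender(file, layout, logSize, numBackups, filePollInterval): once a logSize is given, at least one rolled backup (file.1) is always kept, so the old numBackups === 0 "truncate in place" branch goes away. A rough usage sketch of the rolled-log behaviour — the file name and sizes are illustrative, not part of the commit; the call shape mirrors the updated test further down:

var log4js = require('log4js');

// roll at ~1KB; asking for 0 backups now still keeps one (app.log.1),
// and the file size is polled every second (filePollInterval is in seconds)
log4js.addAppender(
  log4js.fileAppender('app.log', log4js.layouts.basicLayout, 1024, 0, 1),
  'app'
);

var logger = log4js.getLogger('app');
logger.info("once app.log passes the size limit it is renamed to app.log.1");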

View File

@@ -159,7 +159,9 @@ function configureLevels(levels) {
     }
   } else {
     for (l in loggers) {
-      loggers[l].setLevel();
+      if (loggers.hasOwnProperty(l)) {
+        loggers[l].setLevel();
+      }
     }
   }
 }
@@ -323,7 +325,7 @@ function initReloadConfiguration(filename, options) {
 function configure (configurationFileOrObject, options) {
   var config = configurationFileOrObject;
   if (config === undefined || config === null || typeof(config) === 'string') {
-    options = options || { reloadSecs: 60 };
+    options = options || { };
     if (options.reloadSecs) {
       initReloadConfiguration(config, options);
     }
@@ -355,9 +357,11 @@ function replaceConsole(logger) {
 function loadAppenders() {
   var appenderList = fs.readdirSync(__dirname + '/appenders');
   appenderList.forEach(function(file) {
-    var appenderModule = require('./appenders/' + file);
-    module.exports.appenders[appenderModule.name] = appenderModule.appender;
-    appenderMakers[appenderModule.name] = appenderModule.configure;
+    if (/\.js$/.test(file)) {
+      var appenderModule = require('./appenders/' + file);
+      module.exports.appenders[appenderModule.name] = appenderModule.appender;
+      appenderMakers[appenderModule.name] = appenderModule.configure;
+    }
   });
 }
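With the configure() hunk above, automatic re-reading of a configuration file becomes opt-in: passing no options leaves reloadSecs unset, so initReloadConfiguration is only wired up when the caller asks for it (the test change at the bottom of this commit does exactly that). A minimal sketch, assuming a config file named log4js.json (the name is illustrative):

var log4js = require('log4js');

// load the configuration once; no background polling any more
log4js.configure('log4js.json');

// opt back in to the old behaviour: re-read the file every 60 seconds
log4js.configure('log4js.json', { reloadSecs: 60 });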

View File

@@ -1,6 +1,6 @@
 {
   "name": "log4js",
-  "version": "0.3.5",
+  "version": "0.3.8",
   "description": "Port of Log4js to work with node.",
   "keywords": [
     "logging",

View File

@@ -36,17 +36,21 @@ vows.describe('log4js fileAppender').addBatch({
   'with a max file size and no backups': {
     topic: function() {
       var testFile = __dirname + '/fa-maxFileSize-test.log'
-      , logger = log4js.getLogger('max-file-size');
+      , logger = log4js.getLogger('max-file-size')
+      , that = this;
       remove(testFile);
-      //log file of 50 bytes maximum, no backups, check every 10ms for changes
-      log4js.addAppender(log4js.fileAppender(testFile, log4js.layouts.basicLayout, 50, 0, 0.01), 'max-file-size');
+      remove(testFile + '.1');
+      //log file of 100 bytes maximum, no backups, check every 10ms for changes
+      log4js.addAppender(log4js.fileAppender(testFile, log4js.layouts.basicLayout, 100, 0, 0.01), 'max-file-size');
       logger.info("This is the first log message.");
+      logger.info("This is an intermediate log message.");
       //we have to wait before writing the second one, because node is too fast for the file system.
-      var that = this;
       setTimeout(function() {
         logger.info("This is the second log message.");
       }, 200);
       setTimeout(function() {
         fs.readFile(testFile, "utf8", that.callback);
-      }, 500);
+      }, 400);
     },
     'log file should only contain the second message': function(err, fileContents) {
       assert.include(fileContents, "This is the second log message.\n");
@@ -56,9 +60,10 @@ vows.describe('log4js fileAppender').addBatch({
     topic: function() {
       fs.readdir(__dirname, this.callback);
     },
-    'starting with the test file name should be one': function(err, files) {
+    'starting with the test file name should be two': function(err, files) {
+      //there will always be one backup if you've specified a max log size
       var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; });
-      assert.length(logFiles, 1);
+      assert.length(logFiles, 2);
     }
   }
 },
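The reworked vow expects both the live log and its single rolled backup on disk once the batch finishes. A standalone check along the same lines — illustrative only, assuming it runs from the test directory after the batch completes:

var fs = require('fs')
  , assert = require('assert');

// everything named like the test log: the live file plus exactly one backup
var logFiles = fs.readdirSync(__dirname).filter(function(file) {
  return file.indexOf('fa-maxFileSize-test.log') > -1;
});
assert.equal(logFiles.length, 2);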

View File

@@ -451,7 +451,7 @@ vows.describe('log4js').addBatch({
       }
     );
-    log4js.configure();
+    log4js.configure(undefined, { reloadSecs: 30 });
     logger = log4js.getLogger('a-test');
     logger.info("info1");
     logger.debug("debug2 - should be ignored");