Fixed the behaviour of maxLogSize when numBackups is 0

This commit is contained in:
Gareth Jones 2011-07-26 18:40:41 +10:00
parent 71f9eef6fe
commit 163db0e5fd
3 changed files with 29 additions and 24 deletions

View File

@ -13,6 +13,8 @@ var layouts = require('../layouts')
function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups;
//there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
filePollInterval = filePollInterval * 1000 || 30000;
function setupLogRolling () {
@ -31,25 +33,21 @@ function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
}
function rollThatLog () {
if (numBackups > 0) {
//roll the backups (rename file.n-1 to file.n, where n <= numBackups)
for (var i=numBackups; i > 0; i--) {
if (i > 1) {
if (fileExists(file + '.' + (i-1))) {
fs.renameSync(file+'.'+(i-1), file+'.'+i);
}
} else {
fs.renameSync(file, file+'.1');
//roll the backups (rename file.n-1 to file.n, where n <= numBackups)
for (var i=numBackups; i > 0; i--) {
if (i > 1) {
if (fileExists(file + '.' + (i-1))) {
fs.renameSync(file+'.'+(i-1), file+'.'+i);
}
} else {
fs.renameSync(file, file+'.1');
}
//let's make a new file
var newLogFileFD = fs.openSync(file, 'a', 0644)
, oldLogFileFD = logFile.fd;
logFile.fd = newLogFileFD;
fs.close(oldLogFileFD);
} else {
fs.truncate(logFile.fd, logSize);
}
//let's make a new file
var newLogFileFD = fs.openSync(file, 'a', 0644)
, oldLogFileFD = logFile.fd;
logFile.fd = newLogFileFD;
fs.close(oldLogFileFD);
}
function fileExists (filename) {

View File

@ -159,7 +159,9 @@ function configureLevels(levels) {
}
} else {
for (l in loggers) {
loggers[l].setLevel();
if (loggers.hasOwnProperty(l)) {
loggers[l].setLevel();
}
}
}
}

View File

@ -36,17 +36,21 @@ vows.describe('log4js fileAppender').addBatch({
'with a max file size and no backups': {
topic: function() {
var testFile = __dirname + '/fa-maxFileSize-test.log'
, logger = log4js.getLogger('max-file-size');
, logger = log4js.getLogger('max-file-size')
, that = this;
remove(testFile);
//log file of 50 bytes maximum, no backups, check every 10ms for changes
log4js.addAppender(log4js.fileAppender(testFile, log4js.layouts.basicLayout, 50, 0, 0.01), 'max-file-size');
remove(testFile + '.1');
//log file of 100 bytes maximum, no backups, check every 10ms for changes
log4js.addAppender(log4js.fileAppender(testFile, log4js.layouts.basicLayout, 100, 0, 0.01), 'max-file-size');
logger.info("This is the first log message.");
logger.info("This is an intermediate log message.");
//we have to wait before writing the second one, because node is too fast for the file system.
var that = this;
setTimeout(function() {
logger.info("This is the second log message.");
}, 200);
setTimeout(function() {
fs.readFile(testFile, "utf8", that.callback);
}, 500);
}, 400);
},
'log file should only contain the second message': function(err, fileContents) {
assert.include(fileContents, "This is the second log message.\n");
@ -56,9 +60,10 @@ vows.describe('log4js fileAppender').addBatch({
topic: function() {
fs.readdir(__dirname, this.callback);
},
'starting with the test file name should be one': function(err, files) {
'starting with the test file name should be two': function(err, files) {
//there will always be one backup if you've specified a max log size
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; });
assert.length(logFiles, 1);
assert.length(logFiles, 2);
}
}
},