added a log rolling function to file appender
commit 75b9e82cac
parent 4406f211c4
@@ -342,22 +342,69 @@ module.exports = function (fileSystem, standardOutput, configPaths) {
   }

   /**
-   * File Appender writing the logs to a text file.
+   * File Appender writing the logs to a text file. Supports rolling of logs by size.
    *
    * @param file file log messages will be written to
    * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
+   * @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
+   * @param numBackups - the number of log files to keep after logSize has been reached (default 5)
+   * @param filePollInterval - the time in seconds between file size checks (default 30s)
    */
-  function fileAppender (file, layout) {
+  function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
     layout = layout || basicLayout;
     //syncs are generally bad, but we need
     //the file to be open before we start doing any writing.
     var logFile = fs.openSync(file, 'a', 0644);

+    if (logSize > 0) {
+      setupLogRolling(logFile, file, logSize, numBackups || 5, (filePollInterval * 1000) || 30000);
+    }
+
     return function(loggingEvent) {
       fs.write(logFile, layout(loggingEvent)+'\n', null, "utf8");
     };
   }

+  function setupLogRolling (logFile, filename, logSize, numBackups, filePollInterval) {
+    fs.watchFile(filename,
+      {
+        persistent: false,
+        interval: filePollInterval
+      },
+      function (curr, prev) {
+        if (curr.size >= logSize) {
+          rollThatLog(logFile, filename, numBackups);
+        }
+      }
+    );
+  }
+
+  function rollThatLog (logFile, filename, numBackups) {
+    //first close the current one.
+    fs.closeSync(logFile);
+    //roll the backups (rename file.n-1 to file.n, where n <= numBackups)
+    for (var i=numBackups; i > 0; i--) {
+      if (i > 1) {
+        if (fileExists(filename + '.' + (i-1))) {
+          fs.renameSync(filename+'.'+(i-1), filename+'.'+i);
+        }
+      } else {
+        fs.renameSync(filename, filename+'.1');
+      }
+    }
+    //open it up again
+    logFile = fs.openSync(filename, 'a', 0644);
+  }
+
+  function fileExists (filename) {
+    try {
+      fs.statSync(filename);
+      return true;
+    } catch (e) {
+      return false;
+    }
+  }
+
   function logLevelFilter (levelString, appender) {
     var level = Level.toLevel(levelString);
     return function(logEvent) {
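With the new parameters, rolling is opt-in: leaving logSize out keeps the old behaviour, because the if (logSize > 0) guard never fires. A hypothetical call to the (module-internal) fileAppender, with the file name and values chosen purely for illustration, might look like this:

    // Sketch only - the diff defines fileAppender inside the module and does not
    // show how it is exposed to callers; names and values below are illustrative.
    var appender = fileAppender(
      'app.log',       // file log messages will be written to
      basicLayout,     // layout function (defaults to basicLayout when omitted)
      1024 * 1024,     // logSize: roll once the file reaches 1 MB
      3,               // numBackups: keep app.log.1 .. app.log.3
      10               // filePollInterval: check the file size every 10 seconds
    );
    appender(someLoggingEvent); // appends layout(someLoggingEvent) + '\n' to app.log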
@@ -61,6 +61,9 @@ vows.describe('log4js').addBatch({
         assert.isNull(arguments[2]);
         assert.equal(arguments[3], "utf8");
         logmessages.push(arguments[1]);
       },
+      watchFile: function() {
+        throw new Error("watchFile should not be called if logSize is not defined");
+      }
     },
     log4js = require('../lib/log4js')(fakeFS);
@@ -91,7 +94,8 @@ vows.describe('log4js').addBatch({
     log4js = require('../lib/log4js')({
       watchFile: function(file, options, callback) {
         assert.equal(file, 'tests.log');
-        assert.deepEqual(options, { persistent: true, interval: 30000 });
+        assert.equal(options.persistent, false);
+        assert.equal(options.interval, 30000);
         assert.isFunction(callback);
         watchCb = callback;
       },
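The options assertions are split up because the appender now asks for persistent: false alongside the 30-second polling interval. In Node's fs.watchFile, persistent controls whether an active watcher on its own keeps the process alive, and interval is the polling period in milliseconds. A minimal standalone sketch of the kind of call setupLogRolling makes (the file name and size threshold here are illustrative, not taken from the diff):

    var fs = require('fs');

    // Poll tests.log every 30 seconds; persistent: false means this watcher
    // by itself will not keep the Node process running.
    fs.watchFile('tests.log', { persistent: false, interval: 30000 }, function (curr, prev) {
      if (curr.size >= 1024 * 1024) {   // illustrative threshold; the appender uses logSize
        console.log('log has grown past the limit, roll it');
      }
    });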
@@ -146,10 +150,10 @@ vows.describe('log4js').addBatch({
       assert.length(filesClosed, 2);
       //it should have renamed both the old log file, and the previous '.1' file
       assert.length(filesRenamed, 3);
-      assert.deepEqual(filesRenamed, ['tests.log', 'tests.log', 'tests.log.1' ]);
+      assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log' ]);
       //it should have renamed 2 more file
-      assert.length(existingFiles, 2);
-      assert.deepEqual(existingFiles, ['tests.log', 'tests.log.2', 'tests.log.1']);
+      assert.length(existingFiles, 4);
+      assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
       //and opened a new log file
       assert.length(filesOpened, 3);
@@ -159,10 +163,10 @@ vows.describe('log4js').addBatch({
       assert.length(filesClosed, 3);
       //it should have renamed the old log file and the 2 backups, with the last one being overwritten.
       assert.length(filesRenamed, 5);
-      assert.deepEqual(filesRenamed, ['tests.log', 'tests.log', 'tests.log.1', 'tests.log', 'tests.log.1' ]);
+      assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log', 'tests.log.1', 'tests.log' ]);
       //it should have renamed 2 more files
-      assert.length(existingFiles, 5);
-      assert.deepEqual(existingFiles, ['tests.log', 'tests.log.2', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
+      assert.length(existingFiles, 6);
+      assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
       //and opened a new log file
       assert.length(filesOpened, 4);
     }
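The reordered expectations in these two hunks follow from the loop in rollThatLog: it counts down from numBackups to 1, so an existing backup is shifted out of the way before the live file is renamed to '.1'. Assuming, as the assertions suggest, that the test's fake fs records the source name of each rename call, the standalone sketch below (illustrative file names, in-memory state instead of a real file system) reproduces the rename order the updated tests expect after three rolls with two backups:

    // roll-order sketch: re-implements only the rename loop from rollThatLog
    var files = { 'tests.log': true };   // the live log exists before the first roll
    var renamed = [];                    // source names passed to rename, in call order

    function roll(filename, numBackups) {
      for (var i = numBackups; i > 0; i--) {
        if (i > 1) {
          if (files[filename + '.' + (i - 1)]) {     // same check fileExists() performs
            renamed.push(filename + '.' + (i - 1));
            files[filename + '.' + i] = true;        // file.(i-1) -> file.i, overwriting any old file.i
          }
        } else {
          renamed.push(filename);
          files[filename + '.1'] = true;             // live file -> file.1
        }
      }
    }

    roll('tests.log', 2);  // first roll: no backups yet, only tests.log is renamed
    roll('tests.log', 2);  // second roll: tests.log.1 is shifted to .2 before tests.log moves
    roll('tests.log', 2);  // third roll: the shift overwrites the old tests.log.2

    console.log(renamed);
    // [ 'tests.log', 'tests.log.1', 'tests.log', 'tests.log.1', 'tests.log' ]

The printed sequence matches the corrected filesRenamed expectations: each roll shifts the '.1' backup (when present) and then renames the live file.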