Merge branch 'master' of https://github.com/mayconbordin/log4js-node into mayconbordin-master
commit 4648e7a5e6

lib/appenders/fileSync.js (new executable file, +187)
@@ -0,0 +1,187 @@
"use strict";
var debug = require('../debug')('fileSync')
, layouts = require('../layouts')
, path = require('path')
, fs = require('fs')
, streams = require('../streams')
, os = require('os')
, eol = os.EOL || '\n'
;

function RollingFileSync (filename, size, backups, options) {
  debug("In RollingFileStream");

  function throwErrorIfArgumentsAreNotValid() {
    if (!filename || !size || size <= 0) {
      throw new Error("You must specify a filename and file size");
    }
  }

  throwErrorIfArgumentsAreNotValid();

  this.filename = filename;
  this.size = size;
  this.backups = backups || 1;
  this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
  this.currentSize = 0;

  function currentFileSize(file) {
    var fileSize = 0;
    try {
      fileSize = fs.statSync(file).size;
    } catch (e) {
      // file does not exist
      fs.appendFileSync(filename, '');
    }
    return fileSize;
  }

  this.currentSize = currentFileSize(this.filename);
}

RollingFileSync.prototype.shouldRoll = function() {
  debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
  return this.currentSize >= this.size;
};

RollingFileSync.prototype.roll = function(filename) {
  var that = this,
    nameMatcher = new RegExp('^' + path.basename(filename));

  function justTheseFiles (item) {
    return nameMatcher.test(item);
  }

  function index(filename_) {
    return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
  }

  function byIndex(a, b) {
    if (index(a) > index(b)) {
      return 1;
    } else if (index(a) < index(b) ) {
      return -1;
    } else {
      return 0;
    }
  }

  function increaseFileIndex (fileToRename) {
    var idx = index(fileToRename);
    debug('Index of ' + fileToRename + ' is ' + idx);
    if (idx < that.backups) {
      //on windows, you can get a EEXIST error if you rename a file to an existing file
      //so, we'll try to delete the file we're renaming to first
      try {
        fs.unlinkSync(filename + '.' + (idx+1));
      } catch(e) {
        //ignore err: if we could not delete, it's most likely that it doesn't exist
      }

      debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
      fs.renameSync(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1));
    }
  }

  function renameTheFiles() {
    //roll the backups (rename file.n to file.n+1, where n <= numBackups)
    debug("Renaming the old files");

    var files = fs.readdirSync(path.dirname(filename));
    files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
  }

  debug("Rolling, rolling, rolling");
  renameTheFiles();
};

RollingFileSync.prototype.write = function(chunk, encoding) {
  var that = this;

  function writeTheChunk() {
    debug("writing the chunk to the file");
    that.currentSize += chunk.length;
    fs.appendFileSync(that.filename, chunk);
  }

  debug("in write");

  if (this.shouldRoll()) {
    this.currentSize = 0;
    this.roll(this.filename);
  }

  writeTheChunk();
};

/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file file log messages will be written to
 * @param layout a function that takes a logevent and returns a string
 *   (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file,
 *   if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize
 *   has been reached (default 5)
 */
function fileAppender (file, layout, logSize, numBackups) {
  debug("fileSync appender created");
  var bytesWritten = 0;
  file = path.normalize(file);
  layout = layout || layouts.basicLayout;
  numBackups = numBackups === undefined ? 5 : numBackups;
  //there has to be at least one backup if logSize has been specified
  numBackups = numBackups === 0 ? 1 : numBackups;

  function openTheStream(file, fileSize, numFiles) {
    var stream;

    if (fileSize) {
      stream = new RollingFileSync(
        file,
        fileSize,
        numFiles
      );
    } else {
      stream = (function(f) {
        // create file if it doesn't exist
        if (!fs.existsSync(f))
          fs.appendFileSync(f, '');

        return {
          write: function(data) {
            fs.appendFileSync(f, data);
          }
        };
      })(file);
    }

    return stream;
  }

  var logFile = openTheStream(file, logSize, numBackups);

  return function(loggingEvent) {
    logFile.write(layout(loggingEvent) + eol);
  };
}

function configure(config, options) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }

  if (options && options.cwd && !config.absolute) {
    config.filename = path.join(options.cwd, config.filename);
  }

  return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
}

exports.appender = fileAppender;
exports.configure = configure;
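For orientation, here is a minimal sketch of how the exported appender function is used programmatically; it mirrors the calls exercised in the test file below. The require path, file name, size, and category name are illustrative, not part of this commit.

var log4js = require('log4js')
, fileSync = require('log4js/lib/appenders/fileSync');  // illustrative require path

// Roll app-sync.log once it passes ~1 KB, keeping two numbered backups
// (app-sync.log.1, app-sync.log.2). All writes happen synchronously.
log4js.addAppender(
  fileSync.appender('app-sync.log', log4js.layouts.basicLayout, 1024, 2),
  'my-category'
);

log4js.getLogger('my-category').info('written synchronously');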
test/fileSyncAppender-test.js (new executable file, +181)

@@ -0,0 +1,181 @@
"use strict";
var vows = require('vows')
, fs = require('fs')
, path = require('path')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, assert = require('assert');

log4js.clearAppenders();

function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    //doesn't really matter if it failed
  }
}

vows.describe('log4js fileSyncAppender').addBatch({
  'with default fileSyncAppender settings': {
    topic: function() {
      var that = this
      , testFile = path.join(__dirname, '/fa-default-sync-test.log')
      , logger = log4js.getLogger('default-settings');
      remove(testFile);

      log4js.clearAppenders();
      log4js.addAppender(require('../lib/appenders/fileSync').appender(testFile), 'default-settings');

      logger.info("This should be in the file.");

      fs.readFile(testFile, "utf8", that.callback);
    },
    'should write log messages to the file': function(err, fileContents) {
      assert.include(fileContents, "This should be in the file.\n");
    },
    'log messages should be in the basic layout format': function(err, fileContents) {
      assert.match(
        fileContents,
        /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
      );
    }
  },
  'with a max file size and no backups': {
    topic: function() {
      var testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log')
      , logger = log4js.getLogger('max-file-size')
      , that = this;
      remove(testFile);
      remove(testFile + '.1');
      //log file of 100 bytes maximum, no backups
      log4js.clearAppenders();
      log4js.addAppender(
        require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0),
        'max-file-size'
      );
      logger.info("This is the first log message.");
      logger.info("This is an intermediate log message.");
      logger.info("This is the second log message.");

      fs.readFile(testFile, "utf8", that.callback);
    },
    'log file should only contain the second message': function(err, fileContents) {
      assert.include(fileContents, "This is the second log message.\n");
      assert.equal(fileContents.indexOf("This is the first log message."), -1);
    },
    'the number of files': {
      topic: function() {
        fs.readdir(__dirname, this.callback);
      },
      'starting with the test file name should be two': function(err, files) {
        //there will always be one backup if you've specified a max log size
        var logFiles = files.filter(
          function(file) { return file.indexOf('fa-maxFileSize-sync-test.log') > -1; }
        );
        assert.equal(logFiles.length, 2);
      }
    }
  },
  'with a max file size and 2 backups': {
    topic: function() {
      var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-sync-test.log')
      , logger = log4js.getLogger('max-file-size-backups');
      remove(testFile);
      remove(testFile+'.1');
      remove(testFile+'.2');

      //log file of 50 bytes maximum, 2 backups
      log4js.clearAppenders();
      log4js.addAppender(
        require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2),
        'max-file-size-backups'
      );
      logger.info("This is the first log message.");
      logger.info("This is the second log message.");
      logger.info("This is the third log message.");
      logger.info("This is the fourth log message.");
      var that = this;

      fs.readdir(__dirname, function(err, files) {
        if (files) {
          that.callback(null, files.sort());
        } else {
          that.callback(err, files);
        }
      });
    },
    'the log files': {
      topic: function(files) {
        var logFiles = files.filter(
          function(file) { return file.indexOf('fa-maxFileSize-with-backups-sync-test.log') > -1; }
        );
        return logFiles;
      },
      'should be 3': function (files) {
        assert.equal(files.length, 3);
      },
      'should be named in sequence': function (files) {
        assert.deepEqual(files, [
          'fa-maxFileSize-with-backups-sync-test.log',
          'fa-maxFileSize-with-backups-sync-test.log.1',
          'fa-maxFileSize-with-backups-sync-test.log.2'
        ]);
      },
      'and the contents of the first file': {
        topic: function(logFiles) {
          fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
        },
        'should be the last log message': function(contents) {
          assert.include(contents, 'This is the fourth log message.');
        }
      },
      'and the contents of the second file': {
        topic: function(logFiles) {
          fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
        },
        'should be the third log message': function(contents) {
          assert.include(contents, 'This is the third log message.');
        }
      },
      'and the contents of the third file': {
        topic: function(logFiles) {
          fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
        },
        'should be the second log message': function(contents) {
          assert.include(contents, 'This is the second log message.');
        }
      }
    }
  }
}).addBatch({
  'configure' : {
    'with fileSyncAppender': {
      topic: function() {
        var log4js = require('../lib/log4js')
        , logger;
        //this config defines one file appender (to ./tmp-sync-tests.log)
        //and sets the log level for "tests" to WARN
        log4js.configure({
          appenders: [{
            category: "tests",
            type: "file",
            filename: "tmp-sync-tests.log",
            layout: { type: "messagePassThrough" }
          }],

          levels: { tests: "WARN" }
        });
        logger = log4js.getLogger('tests');
        logger.info('this should not be written to the file');
        logger.warn('this should be written to the file');

        fs.readFile('tmp-sync-tests.log', 'utf8', this.callback);
      },
      'should load appender configuration from a json file': function(err, contents) {
        assert.include(contents, 'this should be written to the file\n');
        assert.equal(contents.indexOf('this should not be written to the file'), -1);
      }
    }
  }
}).export(module);
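As a complement to the configure test above (which declares type "file"), here is a hedged sketch of driving this appender through log4js.configure. It assumes log4js resolves the type string "fileSync" to lib/appenders/fileSync.js by module name; the filename and size values are illustrative.

var log4js = require('log4js');

log4js.configure({
  appenders: [{
    type: "fileSync",          // assumption: resolved by module name to lib/appenders/fileSync.js
    filename: "app-sync.log",  // illustrative path
    maxLogSize: 1024,          // forwarded by configure() as logSize (bytes)
    backups: 2,                // forwarded as numBackups
    category: "app"
  }]
});

log4js.getLogger('app').warn('written via the configured fileSync appender');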