Compare commits

...

2 Commits

Author SHA1 Message Date
Gareth Jones  9e8e1f76ad  Added node0.8 back into travis builds  2013-04-07 14:41:40 +10:00
Gareth Jones  9c5dff382e  Backported the old streams. Nasty if statements abound.  2013-04-07 14:32:39 +10:00
15 changed files with 784 additions and 205 deletions

View File

@@ -1,3 +1,4 @@
language: node_js
node_js:
- "0.10"
- "0.8"

View File

@@ -1,9 +1,10 @@
var streams = require('../streams'),
layouts = require('../layouts'),
path = require('path'),
os = require('os'),
eol = os.EOL || '\n',
openFiles = [];
var semver = require('semver')
, layouts = require('../layouts')
, path = require('path')
, os = require('os')
, eol = os.EOL || '\n'
, openFiles = []
, streams;
//close open files on process exit.
process.on('exit', function() {
@@ -20,29 +21,36 @@ process.on('exit', function() {
* @layout layout function for log messages - defaults to basicLayout
*/
function appender(filename, pattern, layout) {
layout = layout || layouts.basicLayout;
var logFile;
var logFile = new streams.DateRollingFileStream(filename, pattern);
openFiles.push(logFile);
return function(logEvent) {
logFile.write(layout(logEvent) + eol, "utf8");
};
if (semver.satisfies(process.version, '>=0.10.0')) {
streams = require('../streams');
logFile = new streams.DateRollingFileStream(filename, pattern);
} else {
streams = require('../old-streams');
logFile = new streams.BufferedWriteStream(new streams.DateRollingFileStream(filename, pattern));
}
openFiles.push(logFile);
return function(logEvent) {
logFile.write(layout(logEvent) + eol, "utf8");
};
}
function configure(config, options) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
return appender(config.filename, config.pattern, layout);
}
exports.appender = appender;
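In short, the appender now chooses its stream implementation at require time based on the running node version. The recurring pattern, reduced to a minimal sketch (module paths as used above; the appenders themselves do this lazily inside appender()):

var semver = require('semver');
var streams = semver.satisfies(process.version, '>=0.10.0')
  ? require('../streams')       // streams2-based implementations for node >= 0.10
  : require('../old-streams');  // backported pre-0.10 implementations
// on the old branch, the rolling stream is additionally wrapped in a BufferedWriteStream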

View File

@@ -1,10 +1,10 @@
var layouts = require('../layouts')
, path = require('path')
, fs = require('fs')
, streams = require('../streams')
, os = require('os')
, eol = os.EOL || '\n'
, openFiles = [];
, path = require('path')
, fs = require('fs')
, semver = require('semver')
, os = require('os')
, eol = os.EOL || '\n'
, openFiles = [];
//close open files on process exit.
process.on('exit', function() {
@@ -22,7 +22,7 @@ process.on('exit', function() {
* @param numBackups - the number of log files to keep after logSize has been reached (default 5)
*/
function fileAppender (file, layout, logSize, numBackups) {
var bytesWritten = 0;
var logFile;
file = path.normalize(file);
layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups;
@@ -30,7 +30,9 @@ function fileAppender (file, layout, logSize, numBackups) {
numBackups = numBackups === 0 ? 1 : numBackups;
function openTheStream(file, fileSize, numFiles) {
var stream;
var stream
, streams = require('../streams');
if (fileSize) {
stream = new streams.RollingFileStream(
file,
@@ -46,7 +48,31 @@ function fileAppender (file, layout, logSize, numBackups) {
return stream;
}
var logFile = openTheStream(file, logSize, numBackups);
function openTheOldStyleStream(file, fileSize, numFiles) {
var stream
, streams = require('../old-streams');
if (fileSize) {
stream = new streams.BufferedWriteStream(
new streams.RollingFileStream(
file,
fileSize,
numFiles
)
);
} else {
stream = new streams.BufferedWriteStream(fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' }));
}
stream.on("error", function (err) {
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
});
return stream;
}
if (semver.satisfies(process.version, '>=0.10.0')) {
logFile = openTheStream(file, logSize, numBackups);
} else {
logFile = openTheOldStyleStream(file, logSize, numBackups);
}
// push file to the stack of open handlers
openFiles.push(logFile);

View File

@@ -0,0 +1,99 @@
var fs = require('fs'),
util = require('util');
function debug(message) {
// console.log(message);
}
module.exports = BaseRollingFileStream;
function BaseRollingFileStream(filename, options) {
debug("In BaseRollingFileStream");
this.filename = filename;
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
this.rolling = false;
this.writesWhileRolling = [];
this.currentSize = 0;
this.rollBeforeWrite = false;
function currentFileSize(file) {
var fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
}
return fileSize;
}
function throwErrorIfArgumentsAreNotValid() {
if (!filename) {
throw new Error("You must specify a filename");
}
}
throwErrorIfArgumentsAreNotValid();
debug("Calling BaseRollingFileStream.super");
BaseRollingFileStream.super_.call(this, this.filename, this.options);
this.currentSize = currentFileSize(this.filename);
}
util.inherits(BaseRollingFileStream, fs.FileWriteStream);
BaseRollingFileStream.prototype.initRolling = function() {
var that = this;
function emptyRollingQueue() {
debug("emptying the rolling queue");
var toWrite;
while ((toWrite = that.writesWhileRolling.shift())) {
BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
that.currentSize += toWrite.data.length;
if (that.shouldRoll()) {
that.flush();
return true;
}
}
that.flush();
return false;
}
this.rolling = true;
this.roll(this.filename, function() {
that.currentSize = 0;
that.rolling = emptyRollingQueue();
if (that.rolling) {
process.nextTick(function() { that.initRolling(); });
}
});
};
BaseRollingFileStream.prototype.write = function(data, encoding) {
var canWrite = false;
if (this.rolling) {
this.writesWhileRolling.push({ data: data, encoding: encoding });
} else {
if (this.rollBeforeWrite && this.shouldRoll()) {
this.writesWhileRolling.push({ data: data, encoding: encoding });
this.initRolling();
} else {
canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
this.currentSize += data.length;
debug('current size = ' + this.currentSize);
if (!this.rollBeforeWrite && this.shouldRoll()) {
this.initRolling();
}
}
}
return canWrite;
};
BaseRollingFileStream.prototype.shouldRoll = function() {
return false; // default behaviour is never to roll
};
BaseRollingFileStream.prototype.roll = function(filename, callback) {
callback(); // default behaviour is not to do anything
};

View File

@@ -0,0 +1,78 @@
var events = require('events'),
Dequeue = require('dequeue'),
util = require('util');
module.exports = BufferedWriteStream;
function BufferedWriteStream(stream) {
var that = this;
this.stream = stream;
this.buffer = new Dequeue();
this.canWrite = false;
this.bytes = 0;
this.stream.on("open", function() {
that.canWrite = true;
that.flushBuffer();
});
this.stream.on("error", function (err) {
that.emit("error", err);
});
this.stream.on("drain", function() {
that.canWrite = true;
that.flushBuffer();
});
}
util.inherits(BufferedWriteStream, events.EventEmitter);
Object.defineProperty(
BufferedWriteStream.prototype,
"fd",
{
get: function() { return this.stream.fd; },
set: function(newFd) {
this.stream.fd = newFd;
this.bytes = 0;
}
}
);
Object.defineProperty(
BufferedWriteStream.prototype,
"bytesWritten",
{
get: function() { return this.bytes; }
}
);
BufferedWriteStream.prototype.write = function(data, encoding) {
this.buffer.push({ data: data, encoding: encoding });
this.flushBuffer();
};
BufferedWriteStream.prototype.end = function(data, encoding) {
if (data) {
this.buffer.push({ data: data, encoding: encoding });
}
this.flushBufferEvenIfCannotWrite();
};
BufferedWriteStream.prototype.writeToStream = function(toWrite) {
this.bytes += toWrite.data.length;
this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
};
BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
while (this.buffer.length > 0) {
this.writeToStream(this.buffer.shift());
}
};
BufferedWriteStream.prototype.flushBuffer = function() {
while (this.buffer.length > 0 && this.canWrite) {
this.writeToStream(this.buffer.shift());
}
};
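BufferedWriteStream queues writes until the wrapped stream has emitted 'open' (and again whenever it is waiting for 'drain'), so callers can write to it immediately; data is only pushed to the underlying stream once it is open and not saturated. A rough usage sketch, assuming a plain fs write stream underneath:

var fs = require('fs');
var BufferedWriteStream = require('./BufferedWriteStream');

var raw = fs.createWriteStream('app.log', { encoding: 'utf8', flags: 'a' });
var buffered = new BufferedWriteStream(raw);

// safe to call straight away: the data sits in the dequeue until 'open' fires
buffered.write('first line\n', 'utf8');
buffered.end('last line\n', 'utf8');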

View File

@@ -0,0 +1,89 @@
var BaseRollingFileStream = require('./BaseRollingFileStream'),
format = require('../date_format'),
async = require('async'),
fs = require('fs'),
util = require('util');
module.exports = DateRollingFileStream;
function debug(message) {
// console.log(message);
}
function DateRollingFileStream(filename, pattern, options, now) {
debug("Now is " + now);
if (pattern && typeof(pattern) === 'object') {
now = options;
options = pattern;
pattern = null;
}
this.pattern = pattern || '.yyyy-MM-dd';
this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
debug("this.now is " + this.now + ", now is " + now);
DateRollingFileStream.super_.call(this, filename, options);
this.rollBeforeWrite = true;
}
util.inherits(DateRollingFileStream, BaseRollingFileStream);
DateRollingFileStream.prototype.shouldRoll = function() {
var lastTime = this.lastTimeWeWroteSomething,
thisTime = format.asString(this.pattern, new Date(this.now()));
debug("DateRollingFileStream.shouldRoll with now = " + this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
this.lastTimeWeWroteSomething = thisTime;
this.previousTime = lastTime;
return thisTime !== lastTime;
};
DateRollingFileStream.prototype.roll = function(filename, callback) {
var that = this,
newFilename = filename + this.previousTime;
debug("Starting roll");
debug("Queueing up data until we've finished rolling");
debug("Flushing underlying stream");
this.flush();
async.series([
deleteAnyExistingFile,
renameTheCurrentFile,
openANewFile
], callback);
function deleteAnyExistingFile(cb) {
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
fs.unlink(newFilename, function (err) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
cb();
});
}
function renameTheCurrentFile(cb) {
debug("Renaming the " + filename + " -> " + newFilename);
fs.rename(filename, newFilename, cb);
}
function openANewFile(cb) {
debug("Opening a new file");
fs.open(
filename,
that.options.flags,
that.options.mode,
function (err, fd) {
debug("opened new file");
var oldLogFileFD = that.fd;
that.fd = fd;
that.writable = true;
fs.close(oldLogFileFD, cb);
}
);
}
};

View File

@@ -0,0 +1 @@
These are for pre-0.10.x versions of node and are here just for backwards compatibility. No bug fixes or enhancements will be made to these files.

View File

@@ -0,0 +1,110 @@
var BaseRollingFileStream = require('./BaseRollingFileStream'),
util = require('util'),
path = require('path'),
fs = require('fs'),
async = require('async');
function debug(message) {
// util.debug(message);
// console.log(message);
}
module.exports = RollingFileStream;
function RollingFileStream (filename, size, backups, options) {
this.size = size;
this.backups = backups || 1;
function throwErrorIfArgumentsAreNotValid() {
if (!filename || !size || size <= 0) {
throw new Error("You must specify a filename and file size");
}
}
throwErrorIfArgumentsAreNotValid();
RollingFileStream.super_.call(this, filename, options);
}
util.inherits(RollingFileStream, BaseRollingFileStream);
RollingFileStream.prototype.shouldRoll = function() {
return this.currentSize >= this.size;
};
RollingFileStream.prototype.roll = function(filename, callback) {
var that = this,
nameMatcher = new RegExp('^' + path.basename(filename));
function justTheseFiles (item) {
return nameMatcher.test(item);
}
function index(filename_) {
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
}
function byIndex(a, b) {
if (index(a) > index(b)) {
return 1;
} else if (index(a) < index(b) ) {
return -1;
} else {
return 0;
}
}
function increaseFileIndex (fileToRename, cb) {
var idx = index(fileToRename);
debug('Index of ' + fileToRename + ' is ' + idx);
if (idx < that.backups) {
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
fs.unlink(filename + '.' + (idx+1), function (err) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
});
} else {
cb();
}
}
function renameTheFiles(cb) {
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug("Renaming the old files");
fs.readdir(path.dirname(filename), function (err, files) {
async.forEachSeries(
files.filter(justTheseFiles).sort(byIndex).reverse(),
increaseFileIndex,
cb
);
});
}
function openANewFile(cb) {
debug("Opening a new file");
fs.open(
filename,
that.options.flags,
that.options.mode,
function (err, fd) {
debug("opened new file");
var oldLogFileFD = that.fd;
that.fd = fd;
that.writable = true;
fs.close(oldLogFileFD, cb);
}
);
}
debug("Starting roll");
debug("Queueing up data until we've finished rolling");
debug("Flushing underlying stream");
this.flush();
async.series([
renameTheFiles,
openANewFile
], callback);
};

lib/old-streams/index.js Normal file
View File

@@ -0,0 +1,3 @@
exports.BufferedWriteStream = require('./BufferedWriteStream');
exports.RollingFileStream = require('./RollingFileStream');
exports.DateRollingFileStream = require('./DateRollingFileStream');

View File

@@ -18,7 +18,7 @@
"url": "http://github.com/nomiddlename/log4js-node/issues"
},
"engines": {
"node": "~0.10"
"node": ">=0.6.0"
},
"scripts": {
"test": "vows"
@@ -29,7 +29,8 @@
},
"dependencies": {
"async": "0.1.15",
"dequeue": "1.0.3"
"dequeue": "1.0.3",
"semver": "~1.1.4"
},
"devDependencies": {
"vows": "0.7.0",

View File

@@ -1,8 +1,9 @@
var vows = require('vows'),
assert = require('assert'),
path = require('path'),
fs = require('fs'),
log4js = require('../lib/log4js');
assert = require('assert'),
path = require('path'),
fs = require('fs'),
sandbox = require('sandboxed-module'),
log4js = require('../lib/log4js');
function removeFile(filename) {
return function() {
@@ -95,4 +96,65 @@ vows.describe('../lib/appenders/dateFile').addBatch({
}
}
}).addBatch({
'with node version less than 0.10': {
topic: function() {
var oldStyleStreamCreated = false
, appender = sandbox.require(
'../lib/appenders/dateFile',
{
globals: {
process: {
version: "v0.8.1",
on: function() {}
}
},
requires: {
'../old-streams': {
BufferedWriteStream: function() {
oldStyleStreamCreated = true;
this.on = function() {};
},
DateRollingFileStream: function() {
this.on = function() {};
}
}
}
}
).appender('cheese.log', null, 1000, 1);
return oldStyleStreamCreated;
},
'should load the old-style streams': function(loaded) {
assert.isTrue(loaded);
}
},
'with node version greater than or equal to 0.10': {
topic: function() {
var oldStyleStreamCreated = false
, appender = sandbox.require(
'../lib/appenders/dateFile',
{
globals: {
process: {
version: "v0.10.1",
on: function() {}
}
},
requires: {
'../streams': {
DateRollingFileStream: function() {
this.on = function() {};
}
}
}
}
).appender('cheese.log', null, 1000, 1);
return oldStyleStreamCreated;
},
'should load the new streams': function(loaded) {
assert.isFalse(loaded);
}
}
}).exportTo(module);

View File

@@ -2,6 +2,8 @@ var vows = require('vows')
, fs = require('fs')
, path = require('path')
, log4js = require('../lib/log4js')
, sandbox = require('sandboxed-module')
, semver = require('semver')
, assert = require('assert');
log4js.clearAppenders();
@@ -17,9 +19,10 @@ function remove(filename) {
vows.describe('log4js fileAppender').addBatch({
'adding multiple fileAppenders': {
topic: function () {
var listenersCount = process.listeners('exit').length
, logger = log4js.getLogger('default-settings')
, count = 5, logfile;
var listenersCount = process.listeners('exit').length
, logger = log4js.getLogger('default-settings')
, count = 5
, logfile;
while (count--) {
logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
@@ -109,12 +112,12 @@ vows.describe('log4js fileAppender').addBatch({
//give the system a chance to open the stream
setTimeout(function() {
fs.readdir(__dirname, function(err, files) {
if (files) {
that.callback(null, files.sort());
} else {
that.callback(err, files);
}
});
}, 200);
},
'the log files': {
@@ -133,7 +136,14 @@ vows.describe('log4js fileAppender').addBatch({
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
},
'should be the last log message': function(contents) {
//there's a difference in behaviour between
//old-style streams and new ones (the new ones are
//correct)
if (semver.satisfies(process.version, ">=0.10.0")) {
assert.include(contents, 'This is the fourth log message.');
} else {
assert.isEmpty(contents);
}
}
},
'and the contents of the second file': {
@@ -141,7 +151,14 @@ vows.describe('log4js fileAppender').addBatch({
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
},
'should be the third log message': function(contents) {
//there's a difference in behaviour between
//old-style streams and new ones (the new ones are
//correct)
if (semver.satisfies(process.version, ">=0.10.0")) {
assert.include(contents, 'This is the third log message.');
} else {
assert.include(contents, 'This is the fourth log message.');
}
}
},
'and the contents of the third file': {
@@ -149,31 +166,98 @@ vows.describe('log4js fileAppender').addBatch({
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
},
'should be the second log message': function(contents) {
//there's a difference in behaviour between
//old-style streams and new ones (the new ones are
//correct)
if (semver.satisfies(process.version, ">=0.10.0")) {
assert.include(contents, 'This is the second log message.');
} else {
assert.include(contents, 'This is the third log message.');
}
}
}
}
}
}).addBatch({
'configure' : {
'with fileAppender': {
topic: function() {
var log4js = require('../lib/log4js')
, logger;
//this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/log4js.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile('tmp-tests.log', 'utf8', this.callback);
},
'should load appender configuration from a json file': function(err, contents) {
assert.include(contents, 'this should be written to the file\n');
assert.equal(contents.indexOf('this should not be written to the file'), -1);
}
}
}
}).addBatch({
'with node version less than 0.10': {
topic: function() {
var oldStyleStreamCreated = false
, appender = sandbox.require(
'../lib/appenders/file',
{
globals: {
process: {
version: "v0.8.1",
on: function() {}
}
},
requires: {
'../old-streams': {
BufferedWriteStream: function() {
oldStyleStreamCreated = true;
this.on = function() {};
},
RollingFileStream: function() {
this.on = function() {};
}
}
}
}
).appender('cheese.log', null, 1000, 1);
return oldStyleStreamCreated;
},
'should load the old-style streams': function(loaded) {
assert.isTrue(loaded);
}
},
'with node version greater than or equal to 0.10': {
topic: function() {
var oldStyleStreamCreated = false
, appender = sandbox.require(
'../lib/appenders/file',
{
globals: {
process: {
version: "v0.10.1",
on: function() {}
}
},
requires: {
'../streams': {
RollingFileStream: function() {
this.on = function() {};
}
}
}
}
).appender('cheese.log', null, 1000, 1);
return oldStyleStreamCreated;
},
'should load the new streams': function(loaded) {
assert.isFalse(loaded);
}
}
}).export(module);

View File

@@ -56,6 +56,12 @@ vows.describe('log4js-abspath').addBatch({
};
}
}
},
globals: {
process: {
version: "v0.10.1",
on: function() {}
}
}
}
);
@@ -66,4 +72,4 @@ vows.describe('log4js-abspath').addBatch({
assert.equal(fileOpened, "/absolute/path/to/whatever.log");
}
},
}).export(module);

View File

@@ -2,7 +2,8 @@ var vows = require('vows')
, assert = require('assert')
, streams = require('stream')
, fs = require('fs')
, DateRollingFileStream = require('../../lib/streams').DateRollingFileStream
, semver = require('semver')
, DateRollingFileStream
, testTime = new Date(2012, 8, 12, 10, 37, 11);
function cleanUp(filename) {
@ -15,108 +16,112 @@ function now() {
return testTime.getTime();
}
vows.describe('DateRollingFileStream').addBatch({
if (semver.satisfies(process.version, '>=0.10.0')) {
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
vows.describe('DateRollingFileStream').addBatch({
'arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
'should take a filename and a pattern and return a WritableStream': function(stream) {
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
assert.instanceOf(stream, streams.Writable);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding is not available on the underlying stream
//assert.equal(stream.encoding, 'utf8');
}
},
'default arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),
'pattern should be .yyyy-MM-dd': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
}
},
'with stream arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-3', 'yyyy-MM-dd', { mode: 0666 }),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666);
}
},
'with stream arguments but no pattern': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-4', { mode: 0666 }),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666);
},
'should use default pattern': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
}
},
'with a pattern of .yyyy-MM-dd': {
topic: function() {
var that = this,
stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
'should create a file with the base name': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-date-rolling-file-stream-5') > -1; }).length, 2);
}
},
'the file without a date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
}
}).exportTo(module);
}

View File

@@ -4,7 +4,9 @@ var vows = require('vows')
, events = require('events')
, fs = require('fs')
, streams = require('stream')
, RollingFileStream = require('../../lib/streams').RollingFileStream;
, semver = require('semver')
, RollingFileStream;
function remove(filename) {
try {
@ -14,54 +16,57 @@ function remove(filename) {
}
}
vows.describe('RollingFileStream').addBatch({
if (semver.satisfies(process.version, '>=0.10.0')) {
RollingFileStream = require('../../lib/streams').RollingFileStream;
vows.describe('RollingFileStream').addBatch({
'arguments': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
},
'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) {
assert.instanceOf(stream, streams.Writable);
assert.equal(stream.filename, "test-rolling-file-stream");
assert.equal(stream.size, 1024);
assert.equal(stream.backups, 5);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding isn't a property on the underlying stream
//assert.equal(stream.theStream.encoding, 'utf8');
}
},
'with stream arguments': {
topic: function() {
remove(__dirname + '/test-rolling-file-stream');
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
},
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666);
}
},
'without size': {
topic: function() {
try {
new RollingFileStream(__dirname + "/test-rolling-file-stream");
} catch (e) {
return e;
}
},
'should throw an error': function(err) {
assert.instanceOf(err, Error);
}
},
'without number of backups': {
topic: function() {
remove('test-rolling-file-stream');
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
},
'should default to 1 backup': function(stream) {
assert.equal(stream.backups, 1);
}
},
'writing less than the file size': {
topic: function() {
@@ -70,7 +75,7 @@ vows.describe('RollingFileStream').addBatch({
stream.write("cheese", "utf8", function() {
stream.end();
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
});
},
'should write to the file': function(contents) {
assert.equal(contents, "cheese");
@@ -89,12 +94,12 @@ vows.describe('RollingFileStream').addBatch({
remove(__dirname + "/test-rolling-file-stream-write-more");
remove(__dirname + "/test-rolling-file-stream-write-more.1");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) {
stream.write(i +".cheese\n", "utf8", cb);
}, function() {
stream.end();
that.callback();
});
},
'the number of files': {
topic: function() {
@@ -102,8 +107,8 @@ vows.describe('RollingFileStream').addBatch({
},
'should be two': function(files) {
assert.equal(files.filter(
function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }
).length, 2);
}
},
'the first file': {
@@ -123,4 +128,5 @@ vows.describe('RollingFileStream').addBatch({
}
}
}
}).exportTo(module);
}