Fixes for version v0.10 streams, breaks log4js for older versions of node

Gareth Jones 2013-03-20 09:14:27 +11:00
parent 5e242c9dc9
commit 65e490cbd2
17 changed files with 220 additions and 485 deletions
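The core of the change: the rolling file streams stop extending fs.FileWriteStream and buffering writes by hand, and instead extend the stream.Writable base class from node v0.10's streams2 API, which queues writes and handles back-pressure itself; subclasses only implement _write. A minimal sketch of that pattern, with illustrative names that are not part of the library:

// Sketch of the streams2 pattern adopted here (node >= 0.10).
// SimpleAppendStream is an illustrative name, not log4js code.
var stream = require('stream'),
    util = require('util'),
    fs = require('fs');

function SimpleAppendStream(filename) {
  stream.Writable.call(this);
  this.theStream = fs.createWriteStream(filename, { encoding: 'utf8', flags: 'a' });
}
util.inherits(SimpleAppendStream, stream.Writable);

// Writable delivers one chunk at a time to _write and buffers anything
// written while a previous chunk is still being processed.
SimpleAppendStream.prototype._write = function(chunk, encoding, callback) {
  this.theStream.write(chunk, encoding, callback);
};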

View File

@ -1,4 +1,3 @@
language: node_js
node_js:
- - 0.6
- - 0.8
+ - 0.10

View File

@ -1,4 +1,4 @@
- var log4js = require('./lib/log4js');
+ var log4js = require('../lib/log4js');
//log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: [

View File

@ -1,4 +1,4 @@
- var log4js = require('./lib/log4js')
+ var log4js = require('../lib/log4js')
, log
, i = 0;
log4js.configure({

View File

@ -22,7 +22,7 @@ process.on('exit', function() {
function appender(filename, pattern, layout) {
layout = layout || layouts.basicLayout;
- var logFile = new streams.BufferedWriteStream(new streams.DateRollingFileStream(filename, pattern));
+ var logFile = new streams.DateRollingFileStream(filename, pattern);
openFiles.push(logFile);
return function(logEvent) {

View File

@ -22,53 +22,51 @@ process.on('exit', function() {
* @param numBackups - the number of log files to keep after logSize has been reached (default 5)
*/
function fileAppender (file, layout, logSize, numBackups) {
var bytesWritten = 0;
file = path.normalize(file);
layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups;
//there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
- function openTheStream(file, fileSize, numFiles) {
- var stream;
- if (fileSize) {
- stream = new streams.BufferedWriteStream(
- new streams.RollingFileStream(
- file,
- fileSize,
- numFiles
- )
- );
- } else {
- stream = new streams.BufferedWriteStream(fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' }));
- }
- stream.on("error", function (err) {
- console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
- });
- return stream;
+ function openTheStream(file, fileSize, numFiles) {
+ var stream;
+ if (fileSize) {
+ stream = new streams.RollingFileStream(
+ file,
+ fileSize,
+ numFiles
+ );
+ } else {
+ stream = fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' });
+ }
+ stream.on("error", function (err) {
+ console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
+ });
+ return stream;
+ }
var logFile = openTheStream(file, logSize, numBackups);
// push file to the stack of open handlers
openFiles.push(logFile);
return function(loggingEvent) {
logFile.write(layout(loggingEvent) + eol, "utf8");
};
}
function configure(config, options) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
}
exports.appender = fileAppender;
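Nothing here changes how the appender is configured; only the plumbing underneath it does. For reference, a rolling file appender is still set up roughly like this (a sketch with illustrative filename and sizes):

var log4js = require('log4js');
log4js.configure({
  appenders: [
    // maxLogSize is in bytes; backups is how many rolled files to keep
    { type: 'file', filename: 'app.log', maxLogSize: 1048576, backups: 3 }
  ]
});
log4js.getLogger('app').info('hello from the rolling file appender');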

View File

@ -1,5 +1,6 @@
var fs = require('fs'),
- util = require('util');
+ stream = require('stream'),
+ util = require('util');
function debug(message) {
// console.log(message);
@ -8,85 +9,64 @@ function debug(message) {
module.exports = BaseRollingFileStream;
function BaseRollingFileStream(filename, options) {
debug("In BaseRollingFileStream");
this.filename = filename;
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
this.currentSize = 0;
debug("In BaseRollingFileStream");
this.filename = filename;
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
this.rolling = false;
this.writesWhileRolling = [];
this.currentSize = 0;
this.rollBeforeWrite = false;
function currentFileSize(file) {
var fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
}
return fileSize;
}
function throwErrorIfArgumentsAreNotValid() {
if (!filename) {
throw new Error("You must specify a filename");
}
}
- throwErrorIfArgumentsAreNotValid();
- debug("Calling BaseRollingFileStream.super");
- BaseRollingFileStream.super_.call(this, this.filename, this.options);
- this.currentSize = currentFileSize(this.filename);
+ throwErrorIfArgumentsAreNotValid();
+ debug("Calling BaseRollingFileStream.super");
+ BaseRollingFileStream.super_.call(this);
+ this.openTheStream();
+ this.currentSize = currentFileSize(this.filename);
}
- util.inherits(BaseRollingFileStream, fs.FileWriteStream);
+ util.inherits(BaseRollingFileStream, stream.Writable);
- BaseRollingFileStream.prototype.initRolling = function() {
- var that = this;
+ BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
+ var that = this;
+ function writeTheChunk() {
+ debug("writing the chunk to the underlying stream");
+ that.currentSize += chunk.length;
+ that.theStream.write(chunk, encoding, callback);
+ }
- function emptyRollingQueue() {
- debug("emptying the rolling queue");
- var toWrite;
- while ((toWrite = that.writesWhileRolling.shift())) {
- BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
- that.currentSize += toWrite.data.length;
- if (that.shouldRoll()) {
- that.flush();
- return true;
- }
- }
- that.flush();
- return false;
- }
+ debug("in _write");
- this.rolling = true;
- this.roll(this.filename, function() {
- that.currentSize = 0;
- that.rolling = emptyRollingQueue();
- if (that.rolling) {
- process.nextTick(function() { that.initRolling(); });
- }
- });
+ if (this.shouldRoll()) {
+ this.currentSize = 0;
+ this.roll(this.filename, writeTheChunk);
+ } else {
+ writeTheChunk();
+ }
};
- BaseRollingFileStream.prototype.write = function(data, encoding) {
- var canWrite = false;
- if (this.rolling) {
- this.writesWhileRolling.push({ data: data, encoding: encoding });
- } else {
- if (this.rollBeforeWrite && this.shouldRoll()) {
- this.writesWhileRolling.push({ data: data, encoding: encoding });
- this.initRolling();
- } else {
- canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
- this.currentSize += data.length;
- debug('current size = ' + this.currentSize);
+ BaseRollingFileStream.prototype.openTheStream = function(cb) {
+ debug("opening the underlying stream");
+ this.theStream = fs.createWriteStream(this.filename, this.options);
+ if (cb) {
+ this.theStream.on("open", cb);
+ }
+ };
- if (!this.rollBeforeWrite && this.shouldRoll()) {
- this.initRolling();
- }
- }
- }
- return canWrite;
+ BaseRollingFileStream.prototype.closeTheStream = function(cb) {
+ debug("closing the underlying stream");
+ this.theStream.end(null, null, cb);
};
BaseRollingFileStream.prototype.shouldRoll = function() {
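The rolling/writesWhileRolling/rollBeforeWrite bookkeeping can go away because stream.Writable serialises calls to _write: the next chunk is not delivered until the previous chunk's callback has fired, so a roll started inside _write naturally holds back later writes. A small stand-alone sketch of that guarantee (illustrative code, not part of the library):

var stream = require('stream'),
    util = require('util');

function SlowStream() {
  stream.Writable.call(this);
}
util.inherits(SlowStream, stream.Writable);

SlowStream.prototype._write = function(chunk, encoding, callback) {
  // pretend this chunk triggers a slow roll before it is written
  setTimeout(function() {
    console.log('wrote', chunk.toString());
    callback();
  }, 100);
};

var s = new SlowStream();
s.write('first\n');
s.write('second\n'); // queued by Writable until the first callback fires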

View File

@ -1,78 +0,0 @@
var events = require('events'),
Dequeue = require('dequeue'),
util = require('util');
module.exports = BufferedWriteStream;
function BufferedWriteStream(stream) {
var that = this;
this.stream = stream;
this.buffer = new Dequeue();
this.canWrite = false;
this.bytes = 0;
this.stream.on("open", function() {
that.canWrite = true;
that.flushBuffer();
});
this.stream.on("error", function (err) {
that.emit("error", err);
});
this.stream.on("drain", function() {
that.canWrite = true;
that.flushBuffer();
});
}
util.inherits(BufferedWriteStream, events.EventEmitter);
Object.defineProperty(
BufferedWriteStream.prototype,
"fd",
{
get: function() { return this.stream.fd; },
set: function(newFd) {
this.stream.fd = newFd;
this.bytes = 0;
}
}
);
Object.defineProperty(
BufferedWriteStream.prototype,
"bytesWritten",
{
get: function() { return this.bytes; }
}
);
BufferedWriteStream.prototype.write = function(data, encoding) {
this.buffer.push({ data: data, encoding: encoding });
this.flushBuffer();
};
BufferedWriteStream.prototype.end = function(data, encoding) {
if (data) {
this.buffer.push({ data: data, encoding: encoding });
}
this.flushBufferEvenIfCannotWrite();
};
BufferedWriteStream.prototype.writeToStream = function(toWrite) {
this.bytes += toWrite.data.length;
this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
};
BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
while (this.buffer.length > 0) {
this.writeToStream(this.buffer.shift());
}
};
BufferedWriteStream.prototype.flushBuffer = function() {
while (this.buffer.length > 0 && this.canWrite) {
this.writeToStream(this.buffer.shift());
}
};
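BufferedWriteStream existed to hold writes until the underlying file stream had emitted 'open', and to stop writing while the stream reported back-pressure. Both jobs are handled by core streams in node v0.10, which is why the class can be deleted outright; something like the following is already safe (sketch, illustrative path):

var fs = require('fs');

// Chunks written before the file descriptor is open are queued internally,
// and write() returning false plus the 'drain' event cover back-pressure.
var out = fs.createWriteStream('/tmp/example.log', { flags: 'a' });
out.write('this is buffered until the fd is open\n');
out.end('done\n');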

View File

@ -23,9 +23,7 @@ function DateRollingFileStream(filename, pattern, options, now) {
debug("this.now is " + this.now + ", now is " + now);
DateRollingFileStream.super_.call(this, filename, options);
- this.rollBeforeWrite = true;
}
util.inherits(DateRollingFileStream, BaseRollingFileStream);
DateRollingFileStream.prototype.shouldRoll = function() {
@ -45,14 +43,12 @@ DateRollingFileStream.prototype.roll = function(filename, callback) {
newFilename = filename + this.previousTime;
debug("Starting roll");
debug("Queueing up data until we've finished rolling");
debug("Flushing underlying stream");
this.flush();
async.series([
deleteAnyExistingFile,
renameTheCurrentFile,
openANewFile
this.closeTheStream.bind(this),
deleteAnyExistingFile,
renameTheCurrentFile,
this.openTheStream.bind(this)
], callback);
function deleteAnyExistingFile(cb) {
@ -69,21 +65,4 @@ DateRollingFileStream.prototype.roll = function(filename, callback) {
fs.rename(filename, newFilename, cb);
}
- function openANewFile(cb) {
- debug("Opening a new file");
- fs.open(
- filename,
- that.options.flags,
- that.options.mode,
- function (err, fd) {
- debug("opened new file");
- var oldLogFileFD = that.fd;
- that.fd = fd;
- that.writable = true;
- fs.close(oldLogFileFD, cb);
- }
- );
- }
};
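Rolling is now expressed as a plain close, rename, reopen sequence: async.series runs the bound closeTheStream, the file shuffling, and the bound openTheStream one after another, instead of juggling file descriptors by hand. The shape of it as a sketch (rollTheFile is an illustrative name, and the stream is assumed to have the openTheStream/closeTheStream methods added in this commit):

var async = require('async'),
    fs = require('fs');

function rollTheFile(stream, filename, newFilename, callback) {
  async.series([
    stream.closeTheStream.bind(stream),
    function(cb) { fs.rename(filename, newFilename, cb); },
    stream.openTheStream.bind(stream)
  ], callback);
}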

View File

@ -4,9 +4,9 @@ var BaseRollingFileStream = require('./BaseRollingFileStream'),
fs = require('fs'),
async = require('async');
- function debug(message) {
+ function debug() {
- // util.debug(message);
// console.log(message);
+ // console.log.apply(console, arguments);
}
module.exports = RollingFileStream;
@ -28,6 +28,7 @@ function RollingFileStream (filename, size, backups, options) {
util.inherits(RollingFileStream, BaseRollingFileStream);
RollingFileStream.prototype.shouldRoll = function() {
debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
return this.currentSize >= this.size;
};
@ -81,30 +82,11 @@ RollingFileStream.prototype.roll = function(filename, callback) {
});
}
- function openANewFile(cb) {
- debug("Opening a new file");
- fs.open(
- filename,
- that.options.flags,
- that.options.mode,
- function (err, fd) {
- debug("opened new file");
- var oldLogFileFD = that.fd;
- that.fd = fd;
- that.writable = true;
- fs.close(oldLogFileFD, cb);
- }
- );
- }
- debug("Starting roll");
- debug("Queueing up data until we've finished rolling");
- debug("Flushing underlying stream");
- this.flush();
- async.series([
- renameTheFiles,
- openANewFile
- ], callback);
+ debug("Rolling, rolling, rolling");
+ async.series([
+ this.closeTheStream.bind(this),
+ renameTheFiles,
+ this.openTheStream.bind(this)
+ ], callback);
};

View File

@ -1,3 +1,2 @@
- exports.BufferedWriteStream = require('./BufferedWriteStream');
exports.RollingFileStream = require('./RollingFileStream');
exports.DateRollingFileStream = require('./DateRollingFileStream');

View File

@ -1,6 +1,6 @@
{
"name": "log4js",
"version": "0.5.7",
"version": "0.6.0",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
@ -17,7 +17,7 @@
"bugs": {
"url": "http://github.com/nomiddlename/log4js-node/issues"
},
"engines": [ "node >=0.6" ],
"engines": [ "node >=0.10" ],
"scripts": {
"test": "vows"
},
@ -30,7 +30,7 @@
"dequeue": "1.0.3"
},
"devDependencies": {
"vows": "0.6.2",
"vows": "0.7.0",
"sandboxed-module": "0.1.3",
"hook.io": "0.8.10",
"underscore": "1.2.1"

View File

@ -5,13 +5,13 @@ var vows = require('vows'),
log4js = require('../lib/log4js');
function removeFile(filename) {
return function() {
fs.unlink(path.join(__dirname, filename), function(err) {
if (err) {
console.log("Could not delete ", filename, err);
}
});
};
}
vows.describe('../lib/appenders/dateFile').addBatch({

View File

@ -108,7 +108,13 @@ vows.describe('log4js fileAppender').addBatch({
var that = this;
//give the system a chance to open the stream
setTimeout(function() {
- fs.readdir(__dirname, that.callback);
+ fs.readdir(__dirname, function(err, files) {
+ if (files) {
+ that.callback(null, files.sort());
+ } else {
+ that.callback(err, files);
+ }
+ });
}, 200);
},
'the log files': {
@ -120,31 +126,31 @@ vows.describe('log4js fileAppender').addBatch({
assert.equal(files.length, 3);
},
'should be named in sequence': function (files) {
- assert.deepEqual(files.sort(), ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
+ assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
},
'and the contents of the first file': {
topic: function(logFiles) {
- fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
- },
- 'should be empty because the last log message triggers rolling': function(contents) {
- assert.isEmpty(contents);
- }
- },
- 'and the contents of the second file': {
- topic: function(logFiles) {
- fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
+ fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
},
'should be the last log message': function(contents) {
assert.include(contents, 'This is the fourth log message.');
}
},
- 'and the contents of the third file': {
+ 'and the contents of the second file': {
topic: function(logFiles) {
- fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
+ fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
},
'should be the third log message': function(contents) {
assert.include(contents, 'This is the third log message.');
}
},
+ 'and the contents of the third file': {
+ topic: function(logFiles) {
+ fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
+ },
+ 'should be the second log message': function(contents) {
+ assert.include(contents, 'This is the second log message.');
+ }
+ }
}
}
}

View File

@ -50,9 +50,10 @@ vows.describe('log4js-abspath').addBatch({
{
RollingFileStream: function(file) {
fileOpened = file;
- },
- BufferedWriteStream: function(other) {
- return { on: function() { }, end: function() {} }
+ return {
+ on: function() {},
+ end: function() {}
+ };
}
}
}

View File

@ -1,8 +1,9 @@
- var vows = require('vows'),
- assert = require('assert'),
- fs = require('fs'),
- DateRollingFileStream = require('../../lib/streams').DateRollingFileStream,
- testTime = new Date(2012, 8, 12, 10, 37, 11);
+ var vows = require('vows')
+ , assert = require('assert')
+ , streams = require('stream')
+ , fs = require('fs')
+ , DateRollingFileStream = require('../../lib/streams').DateRollingFileStream
+ , testTime = new Date(2012, 8, 12, 10, 37, 11);
function cleanUp(filename) {
return function() {
@ -19,15 +20,16 @@ vows.describe('DateRollingFileStream').addBatch({
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
- 'should take a filename and a pattern and return a FileWriteStream': function(stream) {
- assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
- assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
- assert.instanceOf(stream, fs.FileWriteStream);
+ 'should take a filename and a pattern and return a WritableStream': function(stream) {
+ assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
+ assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
+ assert.instanceOf(stream, streams.Writable);
},
'with default settings for the underlying stream': function(stream) {
- assert.equal(stream.mode, 420);
- assert.equal(stream.flags, 'a');
- assert.equal(stream.encoding, 'utf8');
+ assert.equal(stream.theStream.mode, 420);
+ assert.equal(stream.theStream.flags, 'a');
+ //encoding is not available on the underlying stream
+ //assert.equal(stream.encoding, 'utf8');
}
},
@ -45,7 +47,7 @@ vows.describe('DateRollingFileStream').addBatch({
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
'should pass them to the underlying stream': function(stream) {
- assert.equal(stream.mode, 0666);
+ assert.equal(stream.theStream.mode, 0666);
}
},
@ -54,7 +56,7 @@ vows.describe('DateRollingFileStream').addBatch({
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
'should pass them to the underlying stream': function(stream) {
- assert.equal(stream.mode, 0666);
+ assert.equal(stream.theStream.mode, 0666);
},
'should use default pattern': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
@ -63,13 +65,11 @@ vows.describe('DateRollingFileStream').addBatch({
'with a pattern of .yyyy-MM-dd': {
topic: function() {
- var that = this,
- stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
- stream.on("open", function() {
- stream.write("First message\n");
- //wait for the file system to catch up with us
- that.callback(null, stream);
- });
+ var that = this,
+ stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
+ stream.write("First message\n", 'utf8', function() {
+ that.callback(null, stream);
+ });
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
@ -84,9 +84,8 @@ vows.describe('DateRollingFileStream').addBatch({
'when the day changes': {
topic: function(stream) {
- testTime = new Date(2012, 8, 13, 0, 10, 12);
- stream.write("Second message\n");
- setTimeout(this.callback, 100);
+ testTime = new Date(2012, 8, 13, 0, 10, 12);
+ stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
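The tests follow the same shift: instead of waiting on an 'open' event or a setTimeout, they lean on the callback that a streams2 write() accepts, which fires once the chunk has been flushed through _write. A minimal stand-alone illustration of why that makes the assertions deterministic (sketch, illustrative path):

var fs = require('fs');
var out = fs.createWriteStream('/tmp/write-callback-example.log', { flags: 'a' });
out.write('a test message\n', 'utf8', function() {
  // the chunk has been written to the underlying file at this point,
  // so reading the file back no longer needs a timer
  fs.readFile('/tmp/write-callback-example.log', 'utf8', function(err, contents) {
    console.log(contents);
  });
});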

View File

@ -1,130 +0,0 @@
var vows = require('vows')
, assert = require('assert')
, events = require('events')
, BufferedWriteStream = require('../../lib/streams').BufferedWriteStream;
function FakeStream() {
this.writes = [];
this.canWrite = false;
this.callbacks = {};
}
FakeStream.prototype.on = function(event, callback) {
this.callbacks[event] = callback;
}
FakeStream.prototype.write = function(data, encoding) {
assert.equal("utf8", encoding);
this.writes.push(data);
return this.canWrite;
}
FakeStream.prototype.emit = function(event, payload) {
this.callbacks[event](payload);
}
FakeStream.prototype.block = function() {
this.canWrite = false;
}
FakeStream.prototype.unblock = function() {
this.canWrite = true;
this.emit("drain");
}
vows.describe('BufferedWriteStream').addBatch({
'stream': {
topic: new BufferedWriteStream(new FakeStream()),
'should take a stream as an argument and return a stream': function(stream) {
assert.instanceOf(stream, events.EventEmitter);
}
},
'before stream is open': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
stream.write("Some data", "utf8");
stream.write("Some more data", "utf8");
return fakeStream.writes;
},
'should buffer writes': function(writes) {
assert.equal(writes.length, 0);
}
},
'when stream is open': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
stream.write("Some data", "utf8");
fakeStream.canWrite = true;
fakeStream.emit("open");
stream.write("Some more data", "utf8");
return fakeStream.writes;
},
'should write data to stream from before stream was open': function (writes) {
assert.equal(writes[0], "Some data");
},
'should write data to stream from after stream was open': function (writes) {
assert.equal(writes[1], "Some more data");
}
},
'when stream is blocked': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
fakeStream.emit("open");
fakeStream.block();
stream.write("will not know it is blocked until first write", "utf8");
stream.write("so this one will be buffered, but not the previous one", "utf8");
return fakeStream.writes;
},
'should buffer writes': function (writes) {
assert.equal(writes.length, 1);
assert.equal(writes[0], "will not know it is blocked until first write");
}
},
'when stream is unblocked': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
fakeStream.emit("open");
fakeStream.block();
stream.write("will not know it is blocked until first write", "utf8");
stream.write("so this one will be buffered, but not the previous one", "utf8");
fakeStream.unblock();
return fakeStream.writes;
},
'should send buffered data': function (writes) {
assert.equal(writes.length, 2);
assert.equal(writes[1], "so this one will be buffered, but not the previous one");
}
},
'when stream is closed': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
fakeStream.emit("open");
fakeStream.block();
stream.write("first write to notice stream is blocked", "utf8");
stream.write("data while blocked", "utf8");
stream.end();
return fakeStream.writes;
},
'should send any buffered writes to the stream': function (writes) {
assert.equal(writes.length, 2);
assert.equal(writes[1], "data while blocked");
}
},
'when stream errors': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
stream.on("error", this.callback);
fakeStream.emit("error", "oh noes!");
},
'should emit error': function(err, value) {
assert.equal(err, "oh noes!");
}
}
}).exportTo(module);

View File

@ -1,7 +1,9 @@
var vows = require('vows')
+ , async = require('async')
, assert = require('assert')
, events = require('events')
, fs = require('fs')
+ , streams = require('stream')
, RollingFileStream = require('../../lib/streams').RollingFileStream;
function remove(filename) {
@ -18,16 +20,17 @@ vows.describe('RollingFileStream').addBatch({
remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
},
- 'should take a filename, file size in bytes, number of backups as arguments and return a FileWriteStream': function(stream) {
- assert.instanceOf(stream, fs.FileWriteStream);
- assert.equal(stream.filename, "test-rolling-file-stream");
- assert.equal(stream.size, 1024);
- assert.equal(stream.backups, 5);
+ 'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) {
+ assert.instanceOf(stream, streams.Writable);
+ assert.equal(stream.filename, "test-rolling-file-stream");
+ assert.equal(stream.size, 1024);
+ assert.equal(stream.backups, 5);
},
'with default settings for the underlying stream': function(stream) {
- assert.equal(stream.mode, 420);
- assert.equal(stream.flags, 'a');
- assert.equal(stream.encoding, 'utf8');
+ assert.equal(stream.theStream.mode, 420);
+ assert.equal(stream.theStream.flags, 'a');
+ //encoding isn't a property on the underlying stream
+ //assert.equal(stream.theStream.encoding, 'utf8');
}
},
'with stream arguments': {
@ -36,7 +39,7 @@ vows.describe('RollingFileStream').addBatch({
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
},
'should pass them to the underlying stream': function(stream) {
- assert.equal(stream.mode, 0666);
+ assert.equal(stream.theStream.mode, 0666);
}
},
'without size': {
@ -61,66 +64,63 @@ vows.describe('RollingFileStream').addBatch({
}
},
'writing less than the file size': {
+ topic: function() {
+ remove(__dirname + "/test-rolling-file-stream-write-less");
+ var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
+ stream.write("cheese", "utf8", function() {
+ stream.end();
+ fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
+ });
+ },
+ 'should write to the file': function(contents) {
+ assert.equal(contents, "cheese");
+ },
+ 'the number of files': {
- topic: function() {
- remove(__dirname + "/test-rolling-file-stream-write-less");
- var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
- stream.on("open", function() { that.callback(null, stream); });
fs.readdir(__dirname, this.callback);
},
- '(when open)': {
- topic: function(stream) {
- stream.write("cheese", "utf8");
- stream.end();
- fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", this.callback);
- },
- 'should write to the file': function(contents) {
- assert.equal(contents, "cheese");
- },
- 'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
- 'should be one': function(files) {
- assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
- }
- }
+ 'should be one': function(files) {
+ assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
+ }
+ }
},
'writing more than the file size': {
+ topic: function() {
+ remove(__dirname + "/test-rolling-file-stream-write-more");
+ remove(__dirname + "/test-rolling-file-stream-write-more.1");
+ var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
+ async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) {
+ stream.write(i +".cheese\n", "utf8", cb);
+ }, function() {
+ stream.end();
+ that.callback();
+ });
+ },
+ 'the number of files': {
topic: function() {
- remove(__dirname + "/test-rolling-file-stream-write-more");
- remove(__dirname + "/test-rolling-file-stream-write-more.1");
- var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
- stream.on("open", function() {
- for (var i=0; i < 7; i++) {
- stream.write(i +".cheese\n", "utf8");
- }
- //wait for the file system to catch up with us
- setTimeout(that.callback, 100);
- });
+ fs.readdir(__dirname, this.callback);
},
- 'the number of files': {
- topic: function() {
- fs.readdir(__dirname, this.callback);
- },
- 'should be two': function(files) {
- assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }).length, 2);
- }
},
- 'the first file': {
- topic: function() {
- fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
- },
- 'should contain the last two log messages': function(contents) {
- assert.equal(contents, '5.cheese\n6.cheese\n');
- }
- },
- 'the second file': {
- topic: function() {
- fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
- },
- 'should contain the first five log messages': function(contents) {
- assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
- }
+ 'should be two': function(files) {
+ assert.equal(files.filter(
+ function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }
+ ).length, 2);
+ }
},
+ 'the first file': {
+ topic: function() {
+ fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
+ },
+ 'should contain the last two log messages': function(contents) {
+ assert.equal(contents, '5.cheese\n6.cheese\n');
+ }
+ },
+ 'the second file': {
+ topic: function() {
+ fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
+ },
+ 'should contain the first five log messages': function(contents) {
+ assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
+ }
}
}
}).exportTo(module);