Fixes for version v0.10 streams, breaks log4js for older versions of node
commit 65e490cbd2
parent 5e242c9dc9
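The heart of this change is the move from log4js's own BufferedWriteStream wrapper onto the Writable base class that arrived with the node v0.10 streams API. A minimal sketch of the pattern the reworked BaseRollingFileStream follows is below; SimpleFileStream and the surrounding glue are illustrative names for this note, not code taken from the diff.

    var stream = require('stream'),
        util = require('util'),
        fs = require('fs');

    function SimpleFileStream(filename, options) {
      SimpleFileStream.super_.call(this);
      this.filename = filename;
      this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
      // open the real file stream up front, as openTheStream() does in the diff below
      this.theStream = fs.createWriteStream(this.filename, this.options);
    }
    util.inherits(SimpleFileStream, stream.Writable);

    // node calls _write once per chunk and will not deliver the next chunk until
    // the callback fires, so the old hand-rolled buffering is no longer needed
    SimpleFileStream.prototype._write = function(chunk, encoding, callback) {
      this.theStream.write(chunk, encoding, callback);
    };

Subclasses such as RollingFileStream and DateRollingFileStream then only need to supply shouldRoll() and roll(), which is what the hunks below reduce them to.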
@@ -1,4 +1,3 @@
 language: node_js
 node_js:
-  - 0.6
-  - 0.8
+  - 0.10
@@ -1,4 +1,4 @@
-var log4js = require('./lib/log4js');
+var log4js = require('../lib/log4js');
 //log the cheese logger messages to a file, and the console ones as well.
 log4js.configure({
 appenders: [
@@ -1,4 +1,4 @@
-var log4js = require('./lib/log4js')
+var log4js = require('../lib/log4js')
 , log
 , i = 0;
 log4js.configure({
@@ -22,7 +22,7 @@ process.on('exit', function() {
 function appender(filename, pattern, layout) {
 layout = layout || layouts.basicLayout;

-var logFile = new streams.BufferedWriteStream(new streams.DateRollingFileStream(filename, pattern));
+var logFile = new streams.DateRollingFileStream(filename, pattern);
 openFiles.push(logFile);

 return function(logEvent) {
@@ -32,15 +32,13 @@ function fileAppender (file, layout, logSize, numBackups) {
 function openTheStream(file, fileSize, numFiles) {
 var stream;
 if (fileSize) {
-stream = new streams.BufferedWriteStream(
-new streams.RollingFileStream(
+stream = new streams.RollingFileStream(
 file,
 fileSize,
 numFiles
-)
 );
 } else {
-stream = new streams.BufferedWriteStream(fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' }));
+stream = fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' });
 }
 stream.on("error", function (err) {
 console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
@@ -1,4 +1,5 @@
 var fs = require('fs'),
+stream = require('stream'),
 util = require('util');

 function debug(message) {
@@ -8,14 +9,10 @@ function debug(message) {
 module.exports = BaseRollingFileStream;

 function BaseRollingFileStream(filename, options) {

 debug("In BaseRollingFileStream");
 this.filename = filename;
 this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
-this.rolling = false;
-this.writesWhileRolling = [];
 this.currentSize = 0;
-this.rollBeforeWrite = false;

 function currentFileSize(file) {
 var fileSize = 0;
@@ -35,58 +32,41 @@ debug("In BaseRollingFileStream");

 throwErrorIfArgumentsAreNotValid();
 debug("Calling BaseRollingFileStream.super");
-BaseRollingFileStream.super_.call(this, this.filename, this.options);
+BaseRollingFileStream.super_.call(this);
+this.openTheStream();
 this.currentSize = currentFileSize(this.filename);
 }
-util.inherits(BaseRollingFileStream, fs.FileWriteStream);
+util.inherits(BaseRollingFileStream, stream.Writable);

-BaseRollingFileStream.prototype.initRolling = function() {
+BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
 var that = this;
-function emptyRollingQueue() {
-debug("emptying the rolling queue");
-var toWrite;
-while ((toWrite = that.writesWhileRolling.shift())) {
-BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
-that.currentSize += toWrite.data.length;
-if (that.shouldRoll()) {
-that.flush();
-return true;
-}
-}
-that.flush();
-return false;
+function writeTheChunk() {
+debug("writing the chunk to the underlying stream");
+that.currentSize += chunk.length;
+that.theStream.write(chunk, encoding, callback);
 }

-this.rolling = true;
-this.roll(this.filename, function() {
-that.currentSize = 0;
-that.rolling = emptyRollingQueue();
-if (that.rolling) {
-process.nextTick(function() { that.initRolling(); });
+debug("in _write");
+if (this.shouldRoll()) {
+this.currentSize = 0;
+this.roll(this.filename, writeTheChunk);
+} else {
+writeTheChunk();
 }
-});
 };

-BaseRollingFileStream.prototype.write = function(data, encoding) {
-var canWrite = false;
-if (this.rolling) {
-this.writesWhileRolling.push({ data: data, encoding: encoding });
-} else {
-if (this.rollBeforeWrite && this.shouldRoll()) {
-this.writesWhileRolling.push({ data: data, encoding: encoding });
-this.initRolling();
-} else {
-canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
-this.currentSize += data.length;
-debug('current size = ' + this.currentSize);
+BaseRollingFileStream.prototype.openTheStream = function(cb) {
+debug("opening the underlying stream");
+this.theStream = fs.createWriteStream(this.filename, this.options);
+if (cb) {
+this.theStream.on("open", cb);
+}
+};

-if (!this.rollBeforeWrite && this.shouldRoll()) {
-this.initRolling();
-}
-}
-}
-return canWrite;
+BaseRollingFileStream.prototype.closeTheStream = function(cb) {
+debug("closing the underlying stream");
+this.theStream.end(null, null, cb);
 };

 BaseRollingFileStream.prototype.shouldRoll = function() {
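Because the Writable base class serialises calls to _write, the hunk above can simply hold the current chunk in writeTheChunk() while roll() runs, with no writesWhileRolling queue. A hedged usage sketch of the resulting API, assuming the lib/streams layout shown in this diff (the path and sizes are illustrative):

    var streams = require('./lib/streams');

    // filename, maximum size in bytes, number of backups - the signature the tests below exercise
    var logFile = new streams.RollingFileStream('/tmp/app.log', 1024 * 1024, 5);

    logFile.write('a log event\n', 'utf8', function() {
      // fires once the chunk has been handed to the underlying fs write stream,
      // after any roll that this write triggered has finished
    });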
@@ -1,78 +0,0 @@
-var events = require('events'),
-Dequeue = require('dequeue'),
-util = require('util');
-
-module.exports = BufferedWriteStream;
-
-function BufferedWriteStream(stream) {
-var that = this;
-this.stream = stream;
-this.buffer = new Dequeue();
-this.canWrite = false;
-this.bytes = 0;
-
-this.stream.on("open", function() {
-that.canWrite = true;
-that.flushBuffer();
-});
-
-this.stream.on("error", function (err) {
-that.emit("error", err);
-});
-
-this.stream.on("drain", function() {
-that.canWrite = true;
-that.flushBuffer();
-});
-}
-
-util.inherits(BufferedWriteStream, events.EventEmitter);
-
-Object.defineProperty(
-BufferedWriteStream.prototype,
-"fd",
-{
-get: function() { return this.stream.fd; },
-set: function(newFd) {
-this.stream.fd = newFd;
-this.bytes = 0;
-}
-}
-);
-
-Object.defineProperty(
-BufferedWriteStream.prototype,
-"bytesWritten",
-{
-get: function() { return this.bytes; }
-}
-);
-
-BufferedWriteStream.prototype.write = function(data, encoding) {
-this.buffer.push({ data: data, encoding: encoding });
-this.flushBuffer();
-};
-
-BufferedWriteStream.prototype.end = function(data, encoding) {
-if (data) {
-this.buffer.push({ data: data, encoding: encoding });
-}
-this.flushBufferEvenIfCannotWrite();
-};
-
-BufferedWriteStream.prototype.writeToStream = function(toWrite) {
-this.bytes += toWrite.data.length;
-this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
-};
-
-BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
-while (this.buffer.length > 0) {
-this.writeToStream(this.buffer.shift());
-}
-};
-
-BufferedWriteStream.prototype.flushBuffer = function() {
-while (this.buffer.length > 0 && this.canWrite) {
-this.writeToStream(this.buffer.shift());
-}
-};
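This wholesale deletion works because a v0.10 Writable already does what BufferedWriteStream did by hand: it queues chunks internally, signals back-pressure by returning false from write(), and emits 'drain' when it is safe to continue. A minimal sketch of the built-in behaviour (the path is illustrative):

    var fs = require('fs');

    // fs.createWriteStream queues writes issued before its 'open' event on its own
    var out = fs.createWriteStream('/tmp/example.log', { flags: 'a' });

    var ok = out.write('first line\n');
    if (!ok) {
      // back-pressure: wait for 'drain' instead of tracking a canWrite flag by hand
      out.once('drain', function() {
        out.write('written after drain\n');
      });
    }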
@@ -23,9 +23,7 @@ function DateRollingFileStream(filename, pattern, options, now) {
 debug("this.now is " + this.now + ", now is " + now);

 DateRollingFileStream.super_.call(this, filename, options);
-this.rollBeforeWrite = true;
 }

 util.inherits(DateRollingFileStream, BaseRollingFileStream);

 DateRollingFileStream.prototype.shouldRoll = function() {
@@ -45,14 +43,12 @@ DateRollingFileStream.prototype.roll = function(filename, callback) {
 newFilename = filename + this.previousTime;

 debug("Starting roll");
-debug("Queueing up data until we've finished rolling");
-debug("Flushing underlying stream");
-this.flush();

 async.series([
+this.closeTheStream.bind(this),
 deleteAnyExistingFile,
 renameTheCurrentFile,
-openANewFile
+this.openTheStream.bind(this)
 ], callback);

 function deleteAnyExistingFile(cb) {
@@ -69,21 +65,4 @@ DateRollingFileStream.prototype.roll = function(filename, callback) {
 fs.rename(filename, newFilename, cb);
 }

-function openANewFile(cb) {
-debug("Opening a new file");
-fs.open(
-filename,
-that.options.flags,
-that.options.mode,
-function (err, fd) {
-debug("opened new file");
-var oldLogFileFD = that.fd;
-that.fd = fd;
-that.writable = true;
-fs.close(oldLogFileFD, cb);
-}
-);
-}
-
-
 };
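Rolling is now a close/rename/reopen sequence run through async.series instead of swapping raw file descriptors in openANewFile. A stand-alone sketch of the same idea, with the rename step simplified and the function name invented for this note:

    var async = require('async'),
        fs = require('fs');

    // theStream is an fs.WriteStream; reopen is a function(cb) that recreates it
    function rollOnce(theStream, filename, reopen, callback) {
      async.series([
        function(cb) { theStream.end(null, null, cb); },            // close the current stream
        function(cb) { fs.rename(filename, filename + '.1', cb); }, // simplified rename step
        reopen                                                      // open a fresh stream
      ], callback);
    }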
@@ -4,9 +4,9 @@ var BaseRollingFileStream = require('./BaseRollingFileStream'),
 fs = require('fs'),
 async = require('async');

-function debug(message) {
+function debug() {
 // util.debug(message);
-// console.log(message);
+// console.log.apply(console, arguments);
 }

 module.exports = RollingFileStream;
@@ -28,6 +28,7 @@ function RollingFileStream (filename, size, backups, options) {
 util.inherits(RollingFileStream, BaseRollingFileStream);

 RollingFileStream.prototype.shouldRoll = function() {
+debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
 return this.currentSize >= this.size;
 };

@@ -81,30 +82,11 @@ RollingFileStream.prototype.roll = function(filename, callback) {
 });
 }

-function openANewFile(cb) {
-debug("Opening a new file");
-fs.open(
-filename,
-that.options.flags,
-that.options.mode,
-function (err, fd) {
-debug("opened new file");
-var oldLogFileFD = that.fd;
-that.fd = fd;
-that.writable = true;
-fs.close(oldLogFileFD, cb);
-}
-);
-}
+debug("Rolling, rolling, rolling");

-debug("Starting roll");
-debug("Queueing up data until we've finished rolling");
-debug("Flushing underlying stream");
-this.flush();

 async.series([
+this.closeTheStream.bind(this),
 renameTheFiles,
-openANewFile
+this.openTheStream.bind(this)
 ], callback);

 };
@@ -1,3 +1,2 @@
-exports.BufferedWriteStream = require('./BufferedWriteStream');
 exports.RollingFileStream = require('./RollingFileStream');
 exports.DateRollingFileStream = require('./DateRollingFileStream');
@@ -1,6 +1,6 @@
 {
 "name": "log4js",
-"version": "0.5.7",
+"version": "0.6.0",
 "description": "Port of Log4js to work with node.",
 "keywords": [
 "logging",
@@ -17,7 +17,7 @@
 "bugs": {
 "url": "http://github.com/nomiddlename/log4js-node/issues"
 },
-"engines": [ "node >=0.6" ],
+"engines": [ "node >=0.10" ],
 "scripts": {
 "test": "vows"
 },
@@ -30,7 +30,7 @@
 "dequeue": "1.0.3"
 },
 "devDependencies": {
-"vows": "0.6.2",
+"vows": "0.7.0",
 "sandboxed-module": "0.1.3",
 "hook.io": "0.8.10",
 "underscore": "1.2.1"
@@ -108,7 +108,13 @@ vows.describe('log4js fileAppender').addBatch({
 var that = this;
 //give the system a chance to open the stream
 setTimeout(function() {
-fs.readdir(__dirname, that.callback);
+fs.readdir(__dirname, function(err, files) {
+if (files) {
+that.callback(null, files.sort());
+} else {
+that.callback(err, files);
+}
+});
 }, 200);
 },
 'the log files': {
@@ -120,30 +126,30 @@ vows.describe('log4js fileAppender').addBatch({
 assert.equal(files.length, 3);
 },
 'should be named in sequence': function (files) {
-assert.deepEqual(files.sort(), ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
+assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
 },
 'and the contents of the first file': {
 topic: function(logFiles) {
 fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
 },
-'should be empty because the last log message triggers rolling': function(contents) {
-assert.isEmpty(contents);
+'should be the last log message': function(contents) {
+assert.include(contents, 'This is the fourth log message.');
 }
 },
 'and the contents of the second file': {
 topic: function(logFiles) {
 fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
 },
-'should be the last log message': function(contents) {
-assert.include(contents, 'This is the fourth log message.');
+'should be the third log message': function(contents) {
+assert.include(contents, 'This is the third log message.');
 }
 },
 'and the contents of the third file': {
 topic: function(logFiles) {
 fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
 },
-'should be the third log message': function(contents) {
-assert.include(contents, 'This is the third log message.');
+'should be the second log message': function(contents) {
+assert.include(contents, 'This is the second log message.');
 }
 }
 }
 }
@@ -50,9 +50,10 @@ vows.describe('log4js-abspath').addBatch({
 {
 RollingFileStream: function(file) {
 fileOpened = file;
-},
-BufferedWriteStream: function(other) {
-return { on: function() { }, end: function() {} }
+return {
+on: function() {},
+end: function() {}
+};
 }
 }
 }
@@ -1,8 +1,9 @@
-var vows = require('vows'),
-assert = require('assert'),
-fs = require('fs'),
-DateRollingFileStream = require('../../lib/streams').DateRollingFileStream,
-testTime = new Date(2012, 8, 12, 10, 37, 11);
+var vows = require('vows')
+, assert = require('assert')
+, streams = require('stream')
+, fs = require('fs')
+, DateRollingFileStream = require('../../lib/streams').DateRollingFileStream
+, testTime = new Date(2012, 8, 12, 10, 37, 11);

 function cleanUp(filename) {
 return function() {
@@ -19,15 +20,16 @@ vows.describe('DateRollingFileStream').addBatch({
 topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'),
 teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),

-'should take a filename and a pattern and return a FileWriteStream': function(stream) {
+'should take a filename and a pattern and return a WritableStream': function(stream) {
 assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
 assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
-assert.instanceOf(stream, fs.FileWriteStream);
+assert.instanceOf(stream, streams.Writable);
 },
 'with default settings for the underlying stream': function(stream) {
-assert.equal(stream.mode, 420);
-assert.equal(stream.flags, 'a');
-assert.equal(stream.encoding, 'utf8');
+assert.equal(stream.theStream.mode, 420);
+assert.equal(stream.theStream.flags, 'a');
+//encoding is not available on the underlying stream
+//assert.equal(stream.encoding, 'utf8');
 }
 },

@@ -45,7 +47,7 @@ vows.describe('DateRollingFileStream').addBatch({
 teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),

 'should pass them to the underlying stream': function(stream) {
-assert.equal(stream.mode, 0666);
+assert.equal(stream.theStream.mode, 0666);
 }
 },

@@ -54,7 +56,7 @@ vows.describe('DateRollingFileStream').addBatch({
 teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),

 'should pass them to the underlying stream': function(stream) {
-assert.equal(stream.mode, 0666);
+assert.equal(stream.theStream.mode, 0666);
 },
 'should use default pattern': function(stream) {
 assert.equal(stream.pattern, '.yyyy-MM-dd');
@@ -65,9 +67,7 @@ vows.describe('DateRollingFileStream').addBatch({
 topic: function() {
 var that = this,
 stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
-stream.on("open", function() {
-stream.write("First message\n");
-//wait for the file system to catch up with us
+stream.write("First message\n", 'utf8', function() {
 that.callback(null, stream);
 });
 },
@@ -85,8 +85,7 @@ vows.describe('DateRollingFileStream').addBatch({
 'when the day changes': {
 topic: function(stream) {
 testTime = new Date(2012, 8, 13, 0, 10, 12);
-stream.write("Second message\n");
-setTimeout(this.callback, 100);
+stream.write("Second message\n", 'utf8', this.callback);
 },
 teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),

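The test rewrites above all follow the same rule: instead of waiting for an "open" event or sleeping in a setTimeout, pass a callback to write() and let the v0.10 stream report completion. A hedged stand-alone sketch of that style, using the require path from the tests and an illustrative target path:

    var DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;

    var stream = new DateRollingFileStream('/tmp/date-rolling-example', '.yyyy-MM-dd');

    stream.write("First message\n", 'utf8', function() {
      // the chunk has reached the underlying file stream, so the file can be
      // read back immediately without a timeout
      stream.end();
    });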
@@ -1,130 +0,0 @@
-var vows = require('vows')
-, assert = require('assert')
-, events = require('events')
-, BufferedWriteStream = require('../../lib/streams').BufferedWriteStream;
-
-function FakeStream() {
-this.writes = [];
-this.canWrite = false;
-this.callbacks = {};
-}
-
-FakeStream.prototype.on = function(event, callback) {
-this.callbacks[event] = callback;
-}
-
-FakeStream.prototype.write = function(data, encoding) {
-assert.equal("utf8", encoding);
-this.writes.push(data);
-return this.canWrite;
-}
-
-FakeStream.prototype.emit = function(event, payload) {
-this.callbacks[event](payload);
-}
-
-FakeStream.prototype.block = function() {
-this.canWrite = false;
-}
-
-FakeStream.prototype.unblock = function() {
-this.canWrite = true;
-this.emit("drain");
-}
-
-vows.describe('BufferedWriteStream').addBatch({
-'stream': {
-topic: new BufferedWriteStream(new FakeStream()),
-'should take a stream as an argument and return a stream': function(stream) {
-assert.instanceOf(stream, events.EventEmitter);
-}
-},
-'before stream is open': {
-topic: function() {
-var fakeStream = new FakeStream(),
-stream = new BufferedWriteStream(fakeStream);
-stream.write("Some data", "utf8");
-stream.write("Some more data", "utf8");
-return fakeStream.writes;
-},
-'should buffer writes': function(writes) {
-assert.equal(writes.length, 0);
-}
-},
-'when stream is open': {
-topic: function() {
-var fakeStream = new FakeStream(),
-stream = new BufferedWriteStream(fakeStream);
-stream.write("Some data", "utf8");
-fakeStream.canWrite = true;
-fakeStream.emit("open");
-stream.write("Some more data", "utf8");
-return fakeStream.writes;
-},
-'should write data to stream from before stream was open': function (writes) {
-assert.equal(writes[0], "Some data");
-},
-'should write data to stream from after stream was open': function (writes) {
-assert.equal(writes[1], "Some more data");
-}
-},
-'when stream is blocked': {
-topic: function() {
-var fakeStream = new FakeStream(),
-stream = new BufferedWriteStream(fakeStream);
-fakeStream.emit("open");
-fakeStream.block();
-stream.write("will not know it is blocked until first write", "utf8");
-stream.write("so this one will be buffered, but not the previous one", "utf8");
-return fakeStream.writes;
-},
-'should buffer writes': function (writes) {
-assert.equal(writes.length, 1);
-assert.equal(writes[0], "will not know it is blocked until first write");
-}
-},
-'when stream is unblocked': {
-topic: function() {
-var fakeStream = new FakeStream(),
-stream = new BufferedWriteStream(fakeStream);
-fakeStream.emit("open");
-fakeStream.block();
-stream.write("will not know it is blocked until first write", "utf8");
-stream.write("so this one will be buffered, but not the previous one", "utf8");
-fakeStream.unblock();
-return fakeStream.writes;
-},
-'should send buffered data': function (writes) {
-assert.equal(writes.length, 2);
-assert.equal(writes[1], "so this one will be buffered, but not the previous one");
-}
-},
-'when stream is closed': {
-topic: function() {
-var fakeStream = new FakeStream(),
-stream = new BufferedWriteStream(fakeStream);
-fakeStream.emit("open");
-fakeStream.block();
-stream.write("first write to notice stream is blocked", "utf8");
-stream.write("data while blocked", "utf8");
-stream.end();
-return fakeStream.writes;
-},
-'should send any buffered writes to the stream': function (writes) {
-assert.equal(writes.length, 2);
-assert.equal(writes[1], "data while blocked");
-}
-},
-'when stream errors': {
-topic: function() {
-var fakeStream = new FakeStream(),
-stream = new BufferedWriteStream(fakeStream);
-stream.on("error", this.callback);
-fakeStream.emit("error", "oh noes!");
-},
-'should emit error': function(err, value) {
-assert.equal(err, "oh noes!");
-}
-}
-
-}).exportTo(module);
@@ -1,7 +1,9 @@
 var vows = require('vows')
+, async = require('async')
 , assert = require('assert')
 , events = require('events')
 , fs = require('fs')
+, streams = require('stream')
 , RollingFileStream = require('../../lib/streams').RollingFileStream;

 function remove(filename) {
@@ -18,16 +20,17 @@ vows.describe('RollingFileStream').addBatch({
 remove(__dirname + "/test-rolling-file-stream");
 return new RollingFileStream("test-rolling-file-stream", 1024, 5);
 },
-'should take a filename, file size in bytes, number of backups as arguments and return a FileWriteStream': function(stream) {
-assert.instanceOf(stream, fs.FileWriteStream);
+'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) {
+assert.instanceOf(stream, streams.Writable);
 assert.equal(stream.filename, "test-rolling-file-stream");
 assert.equal(stream.size, 1024);
 assert.equal(stream.backups, 5);
 },
 'with default settings for the underlying stream': function(stream) {
-assert.equal(stream.mode, 420);
-assert.equal(stream.flags, 'a');
-assert.equal(stream.encoding, 'utf8');
+assert.equal(stream.theStream.mode, 420);
+assert.equal(stream.theStream.flags, 'a');
+//encoding isn't a property on the underlying stream
+//assert.equal(stream.theStream.encoding, 'utf8');
 }
 },
 'with stream arguments': {
@@ -36,7 +39,7 @@ vows.describe('RollingFileStream').addBatch({
 return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
 },
 'should pass them to the underlying stream': function(stream) {
-assert.equal(stream.mode, 0666);
+assert.equal(stream.theStream.mode, 0666);
 }
 },
 'without size': {
@@ -64,13 +67,10 @@ vows.describe('RollingFileStream').addBatch({
 topic: function() {
 remove(__dirname + "/test-rolling-file-stream-write-less");
 var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
-stream.on("open", function() { that.callback(null, stream); });
-},
-'(when open)': {
-topic: function(stream) {
-stream.write("cheese", "utf8");
+stream.write("cheese", "utf8", function() {
 stream.end();
-fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", this.callback);
+fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
+});
 },
 'should write to the file': function(contents) {
 assert.equal(contents, "cheese");
@@ -83,19 +83,17 @@ vows.describe('RollingFileStream').addBatch({
 assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
 }
 }
-}
 },
 'writing more than the file size': {
 topic: function() {
 remove(__dirname + "/test-rolling-file-stream-write-more");
 remove(__dirname + "/test-rolling-file-stream-write-more.1");
 var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
-stream.on("open", function() {
-for (var i=0; i < 7; i++) {
-stream.write(i +".cheese\n", "utf8");
-}
-//wait for the file system to catch up with us
-setTimeout(that.callback, 100);
+async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) {
+stream.write(i +".cheese\n", "utf8", cb);
+}, function() {
+stream.end();
+that.callback();
 });
 },
 'the number of files': {
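The rewritten 'writing more than the file size' topic leans on async.forEach so that every chunk has reported back before the stream is ended and the assertions run. A minimal stand-alone sketch of the pattern (the path is illustrative, not the test fixture):

    var async = require('async'),
        fs = require('fs');

    var out = fs.createWriteStream('/tmp/forEach-example.log', { flags: 'a' });

    async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) {
      out.write(i + '.cheese\n', 'utf8', cb);  // each write signals completion through cb
    }, function(err) {
      out.end();  // only end once every chunk has gone through the stream
    });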
@@ -103,7 +101,9 @@ vows.describe('RollingFileStream').addBatch({
 fs.readdir(__dirname, this.callback);
 },
 'should be two': function(files) {
-assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }).length, 2);
+assert.equal(files.filter(
+function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }
+).length, 2);
 }
 },
 'the first file': {