2012-09-05 08:58:28 +08:00
|
|
|
var events = require('events'),
|
Speed up file logging for high rate of logging.
During an evaluation of multiple loggers, I saw a slow down when trying to
quickly log more than 100,000 messages to a file:
```javascript
counter = 150000;
while (counter) {
logger.info('Message[' + counter + ']');
counter -= 1;
}
```
My detailed test can be found here:
- https://gist.github.com/NicolasPelletier/4773843
The test demonstrates that writing 150,000 lines straight in a FileStream
takes about 22 seconds until the file content stabilizes. When calling
logger.debug() 150,000 times, the file stabilizes to its final content
after 229s ( almost 4 minutes ! ).
After investigation, it turns out that the problem is using an Array() to
accumulate the data. Pushing the data in the Array with Array.push() is
quick, but the code flushing the buffer uses Array.shift(), which forces
re-indexing of all 149,999 elements remaining in the Array. This makes
the total flush cost grow quadratically as the buffer grows.
The solution is to use something else than an Array to accumulate the
messages. The fix was made using a package called Dequeue
( https://github.com/lleo/node-dequeue ). By replacing the Array with
a Dequeue object, it brought the logging of 150,000 messages back down to
31s. Seven times faster than the previous 229s.
There is a caveat that each log event is slightly longer due to the need
to create an object to put in the double-ended queue inside the Dequeue
object. According to a quick test, it takes about 4% more time per call
to logger.debug().
2013-02-13 22:35:02 +08:00
|
|
|
Dequeue = require('dequeue'),
|
2012-09-05 08:58:28 +08:00
|
|
|
util = require('util');
|
|
|
|
|
|
|
|
// Public export: consumers wrap a writable stream with BufferedWriteStream.
module.exports = BufferedWriteStream;
|
|
|
|
|
|
|
|
/**
 * Wraps a writable stream and buffers writes until the underlying stream
 * is ready (fires "open") or has drained after signalling back-pressure.
 *
 * @param {Object} stream - underlying writable stream (must emit
 *   "open", "error" and "drain" events)
 */
function BufferedWriteStream(stream) {
    var self = this;

    this.stream = stream;
    // Dequeue gives O(1) shift(); Array.shift() re-indexes and degrades
    // badly when many messages are queued.
    this.buffer = new Dequeue();
    this.canWrite = false;
    this.bytes = 0;

    this.stream.on("open", function() {
        self.canWrite = true;
        self.flushBuffer();
    });

    this.stream.on("error", function (err) {
        // re-emit so consumers of the wrapper can handle stream errors
        self.emit("error", err);
    });

    this.stream.on("drain", function() {
        self.canWrite = true;
        self.flushBuffer();
    });
}
|
|
|
|
|
|
|
|
// Inherit from EventEmitter so the wrapper can re-emit "error" events.
util.inherits(BufferedWriteStream, events.EventEmitter);
|
|
|
|
|
|
|
|
// Expose the wrapped stream's file descriptor. Assigning a new fd
// (e.g. after log rotation) also resets the written-byte counter.
Object.defineProperty(BufferedWriteStream.prototype, "fd", {
    get: function() {
        return this.stream.fd;
    },
    set: function(newFd) {
        this.stream.fd = newFd;
        this.bytes = 0;
    }
});
|
|
|
|
|
|
|
|
// Read-only count of bytes handed to the underlying stream since the
// last fd reset.
Object.defineProperty(BufferedWriteStream.prototype, "bytesWritten", {
    get: function() {
        return this.bytes;
    }
});
|
|
|
|
|
|
|
|
/**
 * Queue a chunk and immediately attempt to flush as much of the buffer
 * as the stream will accept.
 *
 * @param {string|Buffer} data - chunk to write
 * @param {string} [encoding] - encoding passed through to the stream
 */
BufferedWriteStream.prototype.write = function(data, encoding) {
    var entry = { data: data, encoding: encoding };
    this.buffer.push(entry);
    this.flushBuffer();
};
|
|
|
|
|
|
|
|
/**
 * Queue an optional final chunk, then force the entire buffer out to
 * the stream regardless of back-pressure.
 *
 * @param {string|Buffer} [data] - optional final chunk
 * @param {string} [encoding] - encoding passed through to the stream
 */
BufferedWriteStream.prototype.end = function(data, encoding) {
    if (data) {
        var finalEntry = { data: data, encoding: encoding };
        this.buffer.push(finalEntry);
    }
    this.flushBufferEvenIfCannotWrite();
};
|
|
|
|
|
|
|
|
/**
 * Push one queued entry to the underlying stream, updating the byte
 * counter and recording the stream's back-pressure signal (write()
 * returns false when the kernel buffer is full).
 *
 * @param {{data: (string|Buffer), encoding: string}} toWrite
 */
BufferedWriteStream.prototype.writeToStream = function(toWrite) {
    var chunk = toWrite.data;
    this.bytes += chunk.length;
    this.canWrite = this.stream.write(chunk, toWrite.encoding);
};
|
|
|
|
|
|
|
|
/**
 * Drain every queued entry to the stream, ignoring back-pressure.
 * Used by end() so nothing is left queued when the stream closes.
 */
BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
    while (this.buffer.length) {
        var next = this.buffer.shift();
        this.writeToStream(next);
    }
};
|
|
|
|
|
|
|
|
/**
 * Write queued entries until the buffer empties or the stream reports
 * back-pressure (writeToStream sets canWrite to false); a later
 * "drain" event resumes the flush.
 */
BufferedWriteStream.prototype.flushBuffer = function() {
    while (this.canWrite && this.buffer.length > 0) {
        this.writeToStream(this.buffer.shift());
    }
};
|