more linting
parent 07869b915f
commit f24db59523
@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows')
, fs = require('fs')
, path = require('path')
@@ -40,7 +41,7 @@ vows.describe('log4js fileAppender').addBatch({
, testFile = path.join(__dirname, '/fa-default-test.log')
, logger = log4js.getLogger('default-settings');
remove(testFile);
//log4js.configure({ appenders:[ { type: "file", filename: testFile, category: 'default-settings' } ] });

log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
@@ -54,7 +55,10 @@ vows.describe('log4js fileAppender').addBatch({
assert.include(fileContents, "This should be in the file.\n");
},
'log messages should be in the basic layout format': function(err, fileContents) {
assert.match(fileContents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /);
assert.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
);
}
},
'with a max file size and no backups': {
@@ -66,7 +70,10 @@ vows.describe('log4js fileAppender').addBatch({
remove(testFile + '.1');
//log file of 100 bytes maximum, no backups
log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0), 'max-file-size');
log4js.addAppender(
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
'max-file-size'
);
logger.info("This is the first log message.");
logger.info("This is an intermediate log message.");
logger.info("This is the second log message.");
@@ -85,7 +92,9 @@ vows.describe('log4js fileAppender').addBatch({
},
'starting with the test file name should be two': function(err, files) {
//there will always be one backup if you've specified a max log size
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; });
var logFiles = files.filter(
function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; }
);
assert.equal(logFiles.length, 2);
}
}
@@ -100,7 +109,10 @@ vows.describe('log4js fileAppender').addBatch({

//log file of 50 bytes maximum, 2 backups
log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2), 'max-file-size-backups');
log4js.addAppender(
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
'max-file-size-backups'
);
logger.info("This is the first log message.");
logger.info("This is the second log message.");
logger.info("This is the third log message.");
@@ -119,14 +131,20 @@ vows.describe('log4js fileAppender').addBatch({
},
'the log files': {
topic: function(files) {
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; });
var logFiles = files.filter(
function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; }
);
return logFiles;
},
'should be 3': function (files) {
assert.equal(files.length, 3);
},
'should be named in sequence': function (files) {
assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
assert.deepEqual(files, [
'fa-maxFileSize-with-backups-test.log',
'fa-maxFileSize-with-backups-test.log.1',
'fa-maxFileSize-with-backups-test.log.2'
]);
},
'and the contents of the first file': {
topic: function(logFiles) {
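For reference, the calls being re-wrapped in this commit all follow the same file-appender setup pattern. Below is a minimal sketch of that pattern, assuming the 0.x-era log4js API this test file already uses; the require('../lib/log4js') path and the standalone-script framing are assumptions, while the individual calls mirror ones visible in the diff.

// Sketch (not part of the commit): the file-appender setup this test exercises.
// Assumes the 0.x-era log4js API used here; the '../lib/log4js' require path is a guess.
"use strict";
var path = require('path')
  , log4js = require('../lib/log4js')
  , testFile = path.join(__dirname, '/fa-maxFileSize-test.log');

log4js.clearAppenders();
log4js.addAppender(
  // appender(file, layout, maxLogSize, numBackups): roll at ~100 bytes, keep no backups
  require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
  'max-file-size'
);

var logger = log4js.getLogger('max-file-size');
logger.info("This is the first log message.");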