From 3b55aefe6f7d8a0929d2ebf6f663f21ac01f8a82 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 2 Aug 2013 11:36:05 +1000 Subject: [PATCH 01/53] changed logger to not use events. everything is broken --- lib/log4js.js | 80 ++++++++++++++++++-------------------------- lib/logger.js | 13 +++---- test/logger-test.js | 20 ++++++++++- test/logging-test.js | 5 +-- 4 files changed, 59 insertions(+), 59 deletions(-) diff --git a/lib/log4js.js b/lib/log4js.js index fc3bacad..d23aefae 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -43,7 +43,7 @@ * @static * Website: http://log4js.berlios.de */ -var events = require('events') +var debug = require('./debug')('log4js-core') , fs = require('fs') , path = require('path') , util = require('util') @@ -51,9 +51,9 @@ var events = require('events') , levels = require('./levels') , LoggingEvent = require('./logger').LoggingEvent , Logger = require('./logger').Logger +, categoryLevels = {} , ALL_CATEGORIES = '[all]' , appenders = {} -, loggers = {} , appenderMakers = {} , defaultConfig = { appenders: [ @@ -75,25 +75,9 @@ function getLogger (categoryName) { categoryName = Logger.DEFAULT_CATEGORY; } - var appenderList; - if (!loggers[categoryName]) { - // Create the logger for this name if it doesn't already exist - loggers[categoryName] = new Logger(categoryName); - if (appenders[categoryName]) { - appenderList = appenders[categoryName]; - appenderList.forEach(function(appender) { - loggers[categoryName].addListener("log", appender); - }); - } - if (appenders[ALL_CATEGORIES]) { - appenderList = appenders[ALL_CATEGORIES]; - appenderList.forEach(function(appender) { - loggers[categoryName].addListener("log", appender); - }); - } - } - - return loggers[categoryName]; + debug("getLogger(" + categoryName + ")"); + + return new Logger(categoryName, levels.toLevel(categoryLevels[categoryName]) || null); } /** @@ -111,25 +95,12 @@ function addAppender () { } args.forEach(function(category) { - addAppenderToCategory(appender, category); - - if (category === ALL_CATEGORIES) { - addAppenderToAllLoggers(appender); - } else if (loggers[category]) { - loggers[category].addListener("log", appender); - } + addAppenderToCategory(appender, category); }); } -function addAppenderToAllLoggers(appender) { - for (var logger in loggers) { - if (loggers.hasOwnProperty(logger)) { - loggers[logger].addListener("log", appender); - } - } -} - function addAppenderToCategory(appender, category) { + debug("adding appender " + appender + " to category " + category); if (!appenders[category]) { appenders[category] = []; } @@ -137,12 +108,8 @@ function addAppenderToCategory(appender, category) { } function clearAppenders () { + debug("clearing appenders"); appenders = {}; - for (var logger in loggers) { - if (loggers.hasOwnProperty(logger)) { - loggers[logger].removeAllListeners("log"); - } - } } function configureAppenders(appenderList, options) { @@ -163,13 +130,7 @@ function configureAppenders(appenderList, options) { } function configureLevels(levels) { - if (levels) { - for (var category in levels) { - if (levels.hasOwnProperty(category)) { - getLogger(category).setLevel(levels[category]); - } - } - } + categoryLevels = levels || {}; } function setGlobalLogLevel(level) { @@ -185,6 +146,27 @@ function getDefaultLogger () { return getLogger(Logger.DEFAULT_CATEGORY); } +/** + * Log event routing to appenders + * This would be a good place to implement category hierarchies/wildcards, etc + */ +function dispatch(event) { + debug("event is " + util.inspect(event)); + if 
(appenders[event.category]) { + dispatchToCategory(event.category, event); + } + + if (appenders[ALL_CATEGORIES]) { + dispatchToCategory(ALL_CATEGORIES, event); + } +} + +function dispatchToCategory(category, event) { + appenders[category].forEach(function(appender) { + appender(event); + }); +} + var configState = {}; function loadConfigurationFile(filename) { @@ -304,6 +286,7 @@ function loadAppender(appender) { module.exports = { getLogger: getLogger, getDefaultLogger: getDefaultLogger, + dispatch: dispatch, addAppender: addAppender, loadAppender: loadAppender, @@ -323,5 +306,6 @@ module.exports = { }; //set ourselves up +debug("Starting configuration"); configure(); diff --git a/lib/logger.js b/lib/logger.js index 4da0dafb..03d6f046 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -1,7 +1,7 @@ "use strict"; var levels = require('./levels') , util = require('util') -, events = require('events') +, log4js = require('./log4js') , DEFAULT_CATEGORY = '[default]'; /** @@ -10,15 +10,13 @@ var levels = require('./levels') * @param {String} categoryName name of category * @param {Log4js.Level} level level of message * @param {Array} data objects to log - * @param {Log4js.Logger} logger the associated logger * @author Seth Chisamore */ -function LoggingEvent (categoryName, level, data, logger) { +function LoggingEvent (categoryName, level, data) { this.startTime = new Date(); this.categoryName = categoryName; this.data = data; this.level = level; - this.logger = logger; } /** @@ -35,7 +33,6 @@ function Logger (name, level) { this.setLevel(level); } } -util.inherits(Logger, events.EventEmitter); Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY; Logger.prototype.level = levels.TRACE; @@ -50,8 +47,9 @@ Logger.prototype.removeLevel = function() { Logger.prototype.log = function() { var args = Array.prototype.slice.call(arguments) , logLevel = args.shift() - , loggingEvent = new LoggingEvent(this.category, logLevel, args, this); - this.emit("log", loggingEvent); + , loggingEvent = new LoggingEvent(this.category, logLevel, args); + + log4js.dispatch(loggingEvent); }; Logger.prototype.isLevelEnabled = function(otherLevel) { @@ -75,6 +73,5 @@ Logger.prototype.isLevelEnabled = function(otherLevel) { } ); - exports.LoggingEvent = LoggingEvent; exports.Logger = Logger; diff --git a/test/logger-test.js b/test/logger-test.js index 55899f28..9ff31df9 100644 --- a/test/logger-test.js +++ b/test/logger-test.js @@ -2,7 +2,8 @@ var vows = require('vows') , assert = require('assert') , levels = require('../lib/levels') -, Logger = require('../lib/logger').Logger; +, Logger = require('../lib/logger').Logger +, log4js = require('../lib/log4js'); vows.describe('../lib/logger').addBatch({ 'constructor with no parameters': { @@ -53,5 +54,22 @@ vows.describe('../lib/logger').addBatch({ assert.isTrue(logger.isErrorEnabled()); assert.isTrue(logger.isFatalEnabled()); } + }, + + 'log': { + topic: new Logger('testing'), + 'should send log events to log4js': function(logger) { + var evt, original = log4js.dispatch; + log4js.dispatch = function(event) { + evt = event; + }; + + logger.log(levels.DEBUG, "cheese"); + log4js.dispatch = original; + + assert.equal(evt.categoryName, 'testing'); + assert.equal(evt.level, levels.DEBUG); + assert.equal(evt.data[0], "cheese"); + } } }).exportTo(module); diff --git a/test/logging-test.js b/test/logging-test.js index 32ff099c..f173e417 100644 --- a/test/logging-test.js +++ b/test/logging-test.js @@ -54,7 +54,7 @@ vows.describe('log4js').addBatch({ 'log events' : { topic: function(logger) { var 
events = []; - logger.addListener("log", function (logEvent) { events.push(logEvent); }); + log4js.addAppender(function (logEvent) { events.push(logEvent); }, "tests"); logger.debug("Debug event"); logger.trace("Trace event 1"); logger.trace("Trace event 2"); @@ -83,7 +83,7 @@ vows.describe('log4js').addBatch({ }, }, - +/* 'invalid configuration': { 'should throw an exception': function() { assert.throws(function() { @@ -509,4 +509,5 @@ vows.describe('log4js').addBatch({ assert.ok(logger.error); } } +*/ }).export(module); From 5bd7ce3ab997b32bc9dce3da1958ad952ea84d1b Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 2 Aug 2013 15:12:04 +1000 Subject: [PATCH 02/53] working, except for tests which expect log levels to persist across getLogger calls --- lib/log4js.js | 14 +++++++++----- lib/logger.js | 13 ++++++++----- test/logger-test.js | 15 ++++++--------- test/logging-test.js | 1 + 4 files changed, 24 insertions(+), 19 deletions(-) diff --git a/lib/log4js.js b/lib/log4js.js index d23aefae..9f4370c8 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -43,7 +43,7 @@ * @static * Website: http://log4js.berlios.de */ -var debug = require('./debug')('log4js-core') +var debug = require('./debug')('core') , fs = require('fs') , path = require('path') , util = require('util') @@ -77,7 +77,7 @@ function getLogger (categoryName) { debug("getLogger(" + categoryName + ")"); - return new Logger(categoryName, levels.toLevel(categoryLevels[categoryName]) || null); + return new Logger(categoryName, categoryLevels[categoryName] || null, dispatch); } /** @@ -151,18 +151,22 @@ function getDefaultLogger () { * This would be a good place to implement category hierarchies/wildcards, etc */ function dispatch(event) { - debug("event is " + util.inspect(event)); - if (appenders[event.category]) { - dispatchToCategory(event.category, event); + debug("event is " + util.inspect(event)); + debug("appenders is " + util.inspect(appenders)); + if (appenders[event.categoryName]) { + debug("appender defined for " + event.categoryName); + dispatchToCategory(event.categoryName, event); } if (appenders[ALL_CATEGORIES]) { + debug("appender defined for " + ALL_CATEGORIES); dispatchToCategory(ALL_CATEGORIES, event); } } function dispatchToCategory(category, event) { appenders[category].forEach(function(appender) { + debug("Sending " + util.inspect(event) + " to appender " + appender); appender(event); }); } diff --git a/lib/logger.js b/lib/logger.js index 03d6f046..5a1f81b6 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -1,7 +1,7 @@ "use strict"; -var levels = require('./levels') +var debug = require('./debug')('logger') +, levels = require('./levels') , util = require('util') -, log4js = require('./log4js') , DEFAULT_CATEGORY = '[default]'; /** @@ -26,17 +26,20 @@ function LoggingEvent (categoryName, level, data) { * @param name name of category to log to * @author Stephan Strittmatter */ -function Logger (name, level) { +function Logger (name, level, dispatch) { this.category = name || DEFAULT_CATEGORY; if (level) { this.setLevel(level); } + + this.dispatch = dispatch; } Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY; Logger.prototype.level = levels.TRACE; Logger.prototype.setLevel = function(level) { + debug("setting level to " + level); this.level = levels.toLevel(level, this.level || levels.TRACE); }; @@ -48,8 +51,8 @@ Logger.prototype.log = function() { var args = Array.prototype.slice.call(arguments) , logLevel = args.shift() , loggingEvent = new LoggingEvent(this.category, logLevel, args); - - 
log4js.dispatch(loggingEvent); + debug("Logging event " + loggingEvent + " to dispatch = " + util.inspect(this.dispatch)); + this.dispatch(loggingEvent); }; Logger.prototype.isLevelEnabled = function(otherLevel) { diff --git a/test/logger-test.js b/test/logger-test.js index 9ff31df9..e47e8e46 100644 --- a/test/logger-test.js +++ b/test/logger-test.js @@ -57,16 +57,13 @@ vows.describe('../lib/logger').addBatch({ }, 'log': { - topic: new Logger('testing'), - 'should send log events to log4js': function(logger) { - var evt, original = log4js.dispatch; - log4js.dispatch = function(event) { - evt = event; - }; - + topic: function() { + var evt + , logger = new Logger('testing', null, function(event) { evt = event; }); logger.log(levels.DEBUG, "cheese"); - log4js.dispatch = original; - + return evt; + }, + 'should send log events to log4js': function(evt) { assert.equal(evt.categoryName, 'testing'); assert.equal(evt.level, levels.DEBUG); assert.equal(evt.data[0], "cheese"); diff --git a/test/logging-test.js b/test/logging-test.js index f173e417..8615ac0d 100644 --- a/test/logging-test.js +++ b/test/logging-test.js @@ -54,6 +54,7 @@ vows.describe('log4js').addBatch({ 'log events' : { topic: function(logger) { var events = []; + var log4js = require('../lib/log4js'); log4js.addAppender(function (logEvent) { events.push(logEvent); }, "tests"); logger.debug("Debug event"); logger.trace("Trace event 1"); From 9c510f7705f367e6f2635095731b77cd53745924 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 5 Aug 2013 07:51:21 +1000 Subject: [PATCH 03/53] added weak references dep --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index b0056adf..418a5419 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,8 @@ "async": "0.1.15", "dequeue": "1.0.3", "semver": "~1.1.4", - "readable-stream": "~1.0.2" + "readable-stream": "~1.0.2", + "weak": "~0.2.2" }, "devDependencies": { "vows": "0.7.0", From 9897dcbc9306cbaf6bbae3e04b30f0f16b431f54 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 5 Aug 2013 11:19:53 +1000 Subject: [PATCH 04/53] trying out weak references, don't think they're going to help --- lib/log4js.js | 30 ++++++++++++++++++++++++++---- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/lib/log4js.js b/lib/log4js.js index 9f4370c8..22ac913d 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -44,6 +44,7 @@ * Website: http://log4js.berlios.de */ var debug = require('./debug')('core') +, weak = require('weak') , fs = require('fs') , path = require('path') , util = require('util') @@ -51,7 +52,9 @@ var debug = require('./debug')('core') , levels = require('./levels') , LoggingEvent = require('./logger').LoggingEvent , Logger = require('./logger').Logger +, loggerRefs = [] , categoryLevels = {} +, globalLogLevel = null , ALL_CATEGORIES = '[all]' , appenders = {} , appenderMakers = {} @@ -63,21 +66,31 @@ var debug = require('./debug')('core') }; /** - * Get a logger instance. Instance is cached on categoryName level. + * Get a logger instance. * @param {String} categoryName name of category to log to. 
* @return {Logger} instance of logger for the category * @static */ function getLogger (categoryName) { + var level, logger, ref; // Use default logger if categoryName is not specified or invalid if (typeof categoryName !== "string") { categoryName = Logger.DEFAULT_CATEGORY; } - debug("getLogger(" + categoryName + ")"); + level = categoryLevels[categoryName]; - return new Logger(categoryName, categoryLevels[categoryName] || null, dispatch); + if (globalLogLevel) { + level = globalLogLevel; + } + + debug("getLogger(" + categoryName + ") - level is " + level); + logger = new Logger(categoryName, level || null, dispatch); + ref = weak(logger); + loggerRefs.push(ref); + + return logger; } /** @@ -134,7 +147,16 @@ function configureLevels(levels) { } function setGlobalLogLevel(level) { - Logger.prototype.level = levels.toLevel(level, levels.TRACE); + //Logger.prototype.level = levels.toLevel(level, levels.TRACE); + globalLogLevel = level; + var workingRefs = []; + loggerRefs.forEach(function(logger) { + if (logger.setLevel) { + workingRefs.push(logger); + logger.setLevel(level); + } + }); + loggerRefs = workingRefs; } /** From c60d62960822bfa1017ac8cdd17ae5c8b0630765 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 8 Aug 2013 08:56:09 +1000 Subject: [PATCH 05/53] added mocha, simplified logger by removing levels and making immutable --- lib/logger.js | 99 ++++++++++++++------------------------- package.json | 4 +- test/logger-test.js | 112 ++++++++++++++++++-------------------------- 3 files changed, 83 insertions(+), 132 deletions(-) diff --git a/lib/logger.js b/lib/logger.js index 5a1f81b6..7f85956f 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -1,80 +1,49 @@ "use strict"; var debug = require('./debug')('logger') -, levels = require('./levels') -, util = require('util') -, DEFAULT_CATEGORY = '[default]'; +, util = require('util'); + +module.exports = function Logger(dispatch, category) { + if (typeof dispatch !== 'function') { + throw new Error("Logger must have a dispatch delegate."); + } + + if (!category) { + throw new Error("Logger must have a category."); + } + + function log() { + var args = Array.prototype.slice.call(arguments) + , logLevel = args.shift() + , loggingEvent = new LoggingEvent(category, logLevel, args); + debug("Logging event " + loggingEvent + " to dispatch = " + util.inspect(dispatch)); + dispatch(loggingEvent); + } + + var self = this; + ['trace','debug','info','warn','error','fatal'].forEach( + function(level) { + self[level] = function() { + var args = Array.prototype.slice.call(arguments); + args.unshift(level); + log.apply(this, args); + }; + } + ); + +}; /** * Models a logging event. * @constructor - * @param {String} categoryName name of category + * @param {String} category name of category * @param {Log4js.Level} level level of message * @param {Array} data objects to log * @author Seth Chisamore */ -function LoggingEvent (categoryName, level, data) { +function LoggingEvent (category, level, data) { this.startTime = new Date(); - this.categoryName = categoryName; + this.category = category; this.data = data; this.level = level; } -/** - * Logger to log messages. - * use {@see Log4js#getLogger(String)} to get an instance. 
- * @constructor - * @param name name of category to log to - * @author Stephan Strittmatter - */ -function Logger (name, level, dispatch) { - this.category = name || DEFAULT_CATEGORY; - - if (level) { - this.setLevel(level); - } - - this.dispatch = dispatch; -} -Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY; -Logger.prototype.level = levels.TRACE; - -Logger.prototype.setLevel = function(level) { - debug("setting level to " + level); - this.level = levels.toLevel(level, this.level || levels.TRACE); -}; - -Logger.prototype.removeLevel = function() { - delete this.level; -}; - -Logger.prototype.log = function() { - var args = Array.prototype.slice.call(arguments) - , logLevel = args.shift() - , loggingEvent = new LoggingEvent(this.category, logLevel, args); - debug("Logging event " + loggingEvent + " to dispatch = " + util.inspect(this.dispatch)); - this.dispatch(loggingEvent); -}; - -Logger.prototype.isLevelEnabled = function(otherLevel) { - return this.level.isLessThanOrEqualTo(otherLevel); -}; - -['Trace','Debug','Info','Warn','Error','Fatal'].forEach( - function(levelString) { - var level = levels.toLevel(levelString); - Logger.prototype['is'+levelString+'Enabled'] = function() { - return this.isLevelEnabled(level); - }; - - Logger.prototype[levelString.toLowerCase()] = function () { - if (this.isLevelEnabled(level)) { - var args = Array.prototype.slice.call(arguments); - args.unshift(level); - Logger.prototype.log.apply(this, args); - } - }; - } -); - -exports.LoggingEvent = LoggingEvent; -exports.Logger = Logger; diff --git a/package.json b/package.json index 418a5419..68b79843 100644 --- a/package.json +++ b/package.json @@ -38,6 +38,8 @@ "vows": "0.7.0", "sandboxed-module": "0.1.3", "hook.io": "0.8.10", - "underscore": "1.2.1" + "underscore": "1.2.1", + "mocha": "~1.12.0", + "should": "~1.2.2" } } diff --git a/test/logger-test.js b/test/logger-test.js index e47e8e46..832a8cd9 100644 --- a/test/logger-test.js +++ b/test/logger-test.js @@ -1,72 +1,52 @@ "use strict"; -var vows = require('vows') -, assert = require('assert') -, levels = require('../lib/levels') -, Logger = require('../lib/logger').Logger -, log4js = require('../lib/log4js'); +var should = require('should') +, Logger = require('../lib/logger'); -vows.describe('../lib/logger').addBatch({ - 'constructor with no parameters': { - topic: new Logger(), - 'should use default category': function(logger) { - assert.equal(logger.category, Logger.DEFAULT_CATEGORY); - }, - 'should use TRACE log level': function(logger) { - assert.equal(logger.level, levels.TRACE); - } - }, +describe('../lib/logger', function() { + describe('Logger constructor', function() { + it('must be passed a dispatch delegate and a category', function() { + (function() { new Logger(); }).should.throw( + "Logger must have a dispatch delegate." + ); + (function() { new Logger(function() {}); }).should.throw( + "Logger must have a category." 
+ ); + }); - 'constructor with category': { - topic: new Logger('cheese'), - 'should use category': function(logger) { - assert.equal(logger.category, 'cheese'); - }, - 'should use TRACE log level': function(logger) { - assert.equal(logger.level, levels.TRACE); - } - }, + }); - 'constructor with category and level': { - topic: new Logger('cheese', 'debug'), - 'should use category': function(logger) { - assert.equal(logger.category, 'cheese'); - }, - 'should use level': function(logger) { - assert.equal(logger.level, levels.DEBUG); - } - }, + describe('Logger instance', function() { + var event + , logger = new Logger( + function(evt) { event = evt; }, + "exciting category" + ); - 'isLevelEnabled': { - topic: new Logger('cheese', 'info'), - 'should provide a level enabled function for all levels': function(logger) { - assert.isFunction(logger.isTraceEnabled); - assert.isFunction(logger.isDebugEnabled); - assert.isFunction(logger.isInfoEnabled); - assert.isFunction(logger.isWarnEnabled); - assert.isFunction(logger.isErrorEnabled); - assert.isFunction(logger.isFatalEnabled); - }, - 'should return the right values': function(logger) { - assert.isFalse(logger.isTraceEnabled()); - assert.isFalse(logger.isDebugEnabled()); - assert.isTrue(logger.isInfoEnabled()); - assert.isTrue(logger.isWarnEnabled()); - assert.isTrue(logger.isErrorEnabled()); - assert.isTrue(logger.isFatalEnabled()); - } - }, + beforeEach(function() { + event = null; + }); + + it('should be immutable', function() { + logger.category = "rubbish"; + logger.debug("thing"); + + event.category.should.equal("exciting category"); + }); + + ['trace', 'debug', 'info', 'warn', 'error', 'fatal'].forEach(function(level) { + it('should have a ' + level + ' function', function() { + logger[level].should.be.a('function'); + }); + }); + + it('should send log events to the dispatch delegate', function() { + logger.debug("interesting thing"); + event.should.have.property('category').equal('exciting category'); + event.should.have.property('level').equal('debug'); + event.should.have.property('data').eql(["interesting thing"]); + event.should.have.property('startTime'); + }); + }); + +}); - 'log': { - topic: function() { - var evt - , logger = new Logger('testing', null, function(event) { evt = event; }); - logger.log(levels.DEBUG, "cheese"); - return evt; - }, - 'should send log events to log4js': function(evt) { - assert.equal(evt.categoryName, 'testing'); - assert.equal(evt.level, levels.DEBUG); - assert.equal(evt.data[0], "cheese"); - } - } -}).exportTo(module); From a8679aced1ce198eadbec4b967659cc7042b8b77 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 21 Aug 2013 08:02:37 +1000 Subject: [PATCH 06/53] simplified levels a bit, converted tests to mocha --- lib/levels.js | 68 +++-- test/levels-test.js | 684 +++++++++++++++++++++++--------------------- 2 files changed, 393 insertions(+), 359 deletions(-) diff --git a/lib/levels.js b/lib/levels.js index 06370997..1dcc800d 100644 --- a/lib/levels.js +++ b/lib/levels.js @@ -34,35 +34,51 @@ Level.prototype.toString = function() { return this.levelStr; }; -Level.prototype.isLessThanOrEqualTo = function(otherLevel) { - if (typeof otherLevel === "string") { - otherLevel = toLevel(otherLevel); +function convertAndCompare(comparison) { + return function(otherLevel) { + if (typeof otherLevel === "string") { + otherLevel = toLevel(otherLevel); + } + return comparison.call(this, otherLevel); + }; +} + +Level.prototype.isLessThanOrEqualTo = convertAndCompare( + function(otherLevel) { + return 
this.level <= otherLevel.level; } - return this.level <= otherLevel.level; -}; +); -Level.prototype.isGreaterThanOrEqualTo = function(otherLevel) { - if (typeof otherLevel === "string") { - otherLevel = toLevel(otherLevel); +Level.prototype.isGreaterThanOrEqualTo = convertAndCompare( + function(otherLevel) { + return this.level >= otherLevel.level; } - return this.level >= otherLevel.level; -}; +); -Level.prototype.isEqualTo = function(otherLevel) { - if (typeof otherLevel == "string") { - otherLevel = toLevel(otherLevel); +Level.prototype.isEqualTo = convertAndCompare( + function(otherLevel) { + return this.level === otherLevel.level; } - return this.level === otherLevel.level; -}; +); -module.exports = { - ALL: new Level(Number.MIN_VALUE, "ALL"), - TRACE: new Level(5000, "TRACE"), - DEBUG: new Level(10000, "DEBUG"), - INFO: new Level(20000, "INFO"), - WARN: new Level(30000, "WARN"), - ERROR: new Level(40000, "ERROR"), - FATAL: new Level(50000, "FATAL"), - OFF: new Level(Number.MAX_VALUE, "OFF"), - toLevel: toLevel -}; + +exports.ALL = new Level(Number.MIN_VALUE, "ALL"); +exports.TRACE = new Level(5000, "TRACE"); +exports.DEBUG = new Level(10000, "DEBUG"); +exports.INFO = new Level(20000, "INFO"); +exports.WARN = new Level(30000, "WARN"); +exports.ERROR = new Level(40000, "ERROR"); +exports.FATAL = new Level(50000, "FATAL"); +exports.OFF = new Level(Number.MAX_VALUE, "OFF"); + +exports.levels = [ + exports.OFF, + exports.TRACE, + exports.DEBUG, + exports.INFO, + exports.WARN, + exports.ERROR, + exports.FATAL +]; + +exports.toLevel = toLevel; diff --git a/test/levels-test.js b/test/levels-test.js index 99dd1fcb..f3fc6452 100644 --- a/test/levels-test.js +++ b/test/levels-test.js @@ -1,404 +1,422 @@ "use strict"; -var vows = require('vows') -, assert = require('assert') +var assert = require('assert') +, should = require('should') , levels = require('../lib/levels'); function assertThat(level) { - function assertForEach(assertion, test, otherLevels) { + function assertForEach(val, test, otherLevels) { otherLevels.forEach(function(other) { - assertion.call(assert, test.call(level, other)); + test.call(level, other).should.eql(val); }); } return { isLessThanOrEqualTo: function(levels) { - assertForEach(assert.isTrue, level.isLessThanOrEqualTo, levels); + assertForEach(true, level.isLessThanOrEqualTo, levels); }, isNotLessThanOrEqualTo: function(levels) { - assertForEach(assert.isFalse, level.isLessThanOrEqualTo, levels); + assertForEach(false, level.isLessThanOrEqualTo, levels); }, isGreaterThanOrEqualTo: function(levels) { - assertForEach(assert.isTrue, level.isGreaterThanOrEqualTo, levels); + assertForEach(true, level.isGreaterThanOrEqualTo, levels); }, isNotGreaterThanOrEqualTo: function(levels) { - assertForEach(assert.isFalse, level.isGreaterThanOrEqualTo, levels); + assertForEach(false, level.isGreaterThanOrEqualTo, levels); }, isEqualTo: function(levels) { - assertForEach(assert.isTrue, level.isEqualTo, levels); + assertForEach(true, level.isEqualTo, levels); }, isNotEqualTo: function(levels) { - assertForEach(assert.isFalse, level.isEqualTo, levels); + assertForEach(false, level.isEqualTo, levels); } }; } -vows.describe('levels').addBatch({ - 'values': { - topic: levels, - 'should define some levels': function(levels) { - assert.isNotNull(levels.ALL); - assert.isNotNull(levels.TRACE); - assert.isNotNull(levels.DEBUG); - assert.isNotNull(levels.INFO); - assert.isNotNull(levels.WARN); - assert.isNotNull(levels.ERROR); - assert.isNotNull(levels.FATAL); - assert.isNotNull(levels.OFF); - 
}, - 'ALL': { - topic: levels.ALL, - 'should be less than the other levels': function(all) { - assertThat(all).isLessThanOrEqualTo( - [ - levels.ALL, - levels.TRACE, - levels.DEBUG, - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - }, - 'should be greater than no levels': function(all) { - assertThat(all).isNotGreaterThanOrEqualTo( - [ - levels.TRACE, - levels.DEBUG, - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - }, - 'should only be equal to ALL': function(all) { - assertThat(all).isEqualTo([levels.toLevel("ALL")]); - assertThat(all).isNotEqualTo( - [ - levels.TRACE, - levels.DEBUG, - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - } - }, - 'TRACE': { - topic: levels.TRACE, - 'should be less than DEBUG': function(trace) { - assertThat(trace).isLessThanOrEqualTo( - [ - levels.DEBUG, - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]); - }, - 'should be greater than ALL': function(trace) { - assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]); - assertThat(trace).isNotGreaterThanOrEqualTo( - [ - levels.DEBUG, - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - }, - 'should only be equal to TRACE': function(trace) { - assertThat(trace).isEqualTo([levels.toLevel("TRACE")]); - assertThat(trace).isNotEqualTo( - [ - levels.ALL, - levels.DEBUG, - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - } - }, - 'DEBUG': { - topic: levels.DEBUG, - 'should be less than INFO': function(debug) { - assertThat(debug).isLessThanOrEqualTo( - [ - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]); - }, - 'should be greater than TRACE': function(debug) { - assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]); - assertThat(debug).isNotGreaterThanOrEqualTo( - [ - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - }, - 'should only be equal to DEBUG': function(trace) { - assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]); - assertThat(trace).isNotEqualTo( - [ - levels.ALL, - levels.TRACE, - levels.INFO, - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ] - ); - } - }, - 'INFO': { - topic: levels.INFO, - 'should be less than WARN': function(info) { - assertThat(info).isLessThanOrEqualTo([ - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ]); - assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]); - }, - 'should be greater than DEBUG': function(info) { - assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]); - assertThat(info).isNotGreaterThanOrEqualTo([ - levels.WARN, - levels.ERROR, - levels.FATAL, - levels.OFF - ]); - }, - 'should only be equal to INFO': function(trace) { - assertThat(trace).isEqualTo([levels.toLevel("INFO")]); - assertThat(trace).isNotEqualTo([ +describe('../lib/levels', function() { + it('should define some levels', function() { + should.exist(levels.ALL); + should.exist(levels.TRACE); + should.exist(levels.DEBUG); + should.exist(levels.INFO); + should.exist(levels.WARN); + should.exist(levels.ERROR); + should.exist(levels.FATAL); + should.exist(levels.OFF); + }); + + describe('ALL', function() { + var all = levels.ALL; + + it('should be less than the other levels', function() { + 
assertThat(all).isLessThanOrEqualTo( + [ levels.ALL, levels.TRACE, levels.DEBUG, + levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF - ]); - } - }, - 'WARN': { - topic: levels.WARN, - 'should be less than ERROR': function(warn) { - assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]); - assertThat(warn).isNotLessThanOrEqualTo([ - levels.ALL, - levels.TRACE, - levels.DEBUG, - levels.INFO - ]); - }, - 'should be greater than INFO': function(warn) { - assertThat(warn).isGreaterThanOrEqualTo([ - levels.ALL, - levels.TRACE, - levels.DEBUG, - levels.INFO - ]); - assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]); - }, - 'should only be equal to WARN': function(trace) { - assertThat(trace).isEqualTo([levels.toLevel("WARN")]); - assertThat(trace).isNotEqualTo([ - levels.ALL, + ] + ); + }); + + it('should be greater than no levels', function() { + assertThat(all).isNotGreaterThanOrEqualTo( + [ levels.TRACE, levels.DEBUG, levels.INFO, + levels.WARN, levels.ERROR, levels.FATAL, levels.OFF - ]); - } - }, - 'ERROR': { - topic: levels.ERROR, - 'should be less than FATAL': function(error) { - assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]); - assertThat(error).isNotLessThanOrEqualTo([ - levels.ALL, - levels.TRACE, - levels.DEBUG, - levels.INFO, - levels.WARN - ]); - }, - 'should be greater than WARN': function(error) { - assertThat(error).isGreaterThanOrEqualTo([ - levels.ALL, - levels.TRACE, - levels.DEBUG, - levels.INFO, - levels.WARN - ]); - assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]); - }, - 'should only be equal to ERROR': function(trace) { - assertThat(trace).isEqualTo([levels.toLevel("ERROR")]); - assertThat(trace).isNotEqualTo([ - levels.ALL, + ] + ); + }); + + it('should only be equal to ALL', function() { + assertThat(all).isEqualTo([levels.toLevel("ALL")]); + assertThat(all).isNotEqualTo( + [ levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, + levels.ERROR, levels.FATAL, levels.OFF - ]); - } - }, - 'FATAL': { - topic: levels.FATAL, - 'should be less than OFF': function(fatal) { - assertThat(fatal).isLessThanOrEqualTo([levels.OFF]); - assertThat(fatal).isNotLessThanOrEqualTo([ - levels.ALL, - levels.TRACE, + ] + ); + }); + }); + + describe('TRACE', function() { + var trace = levels.TRACE; + + it('should be less than DEBUG', function() { + assertThat(trace).isLessThanOrEqualTo( + [ levels.DEBUG, levels.INFO, levels.WARN, - levels.ERROR - ]); - }, - 'should be greater than ERROR': function(fatal) { - assertThat(fatal).isGreaterThanOrEqualTo([ - levels.ALL, - levels.TRACE, + levels.ERROR, + levels.FATAL, + levels.OFF + ] + ); + assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]); + }); + + it('should be greater than ALL', function() { + assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]); + assertThat(trace).isNotGreaterThanOrEqualTo( + [ levels.DEBUG, levels.INFO, levels.WARN, - levels.ERROR - ]); - assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]); - }, - 'should only be equal to FATAL': function(fatal) { - assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]); - assertThat(fatal).isNotEqualTo([ + levels.ERROR, + levels.FATAL, + levels.OFF + ] + ); + }); + + it('should only be equal to TRACE', function() { + assertThat(trace).isEqualTo([levels.toLevel("TRACE")]); + assertThat(trace).isNotEqualTo( + [ levels.ALL, - levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, + levels.FATAL, levels.OFF - ]); - } - }, - 'OFF': { - topic: 
levels.OFF, - 'should not be less than anything': function(off) { - assertThat(off).isNotLessThanOrEqualTo([ - levels.ALL, - levels.TRACE, - levels.DEBUG, + ] + ); + }); + + }); + + describe('DEBUG', function() { + var debug = levels.DEBUG; + + it('should be less than INFO', function() { + assertThat(debug).isLessThanOrEqualTo( + [ levels.INFO, levels.WARN, levels.ERROR, - levels.FATAL - ]); - }, - 'should be greater than everything': function(off) { - assertThat(off).isGreaterThanOrEqualTo([ - levels.ALL, - levels.TRACE, - levels.DEBUG, + levels.FATAL, + levels.OFF + ] + ); + assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]); + }); + + it('should be greater than TRACE', function() { + assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]); + assertThat(debug).isNotGreaterThanOrEqualTo( + [ levels.INFO, levels.WARN, levels.ERROR, - levels.FATAL - ]); - }, - 'should only be equal to OFF': function(off) { - assertThat(off).isEqualTo([levels.toLevel("OFF")]); - assertThat(off).isNotEqualTo([ + levels.FATAL, + levels.OFF + ] + ); + }); + + it('should only be equal to DEBUG', function() { + assertThat(debug).isEqualTo([levels.toLevel("DEBUG")]); + assertThat(debug).isNotEqualTo( + [ levels.ALL, levels.TRACE, - levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, - levels.FATAL - ]); - } - } - }, - 'isGreaterThanOrEqualTo': { - topic: levels.INFO, - 'should handle string arguments': function(info) { + levels.FATAL, + levels.OFF + ] + ); + }); + }); + + describe('INFO', function() { + var info = levels.INFO; + + it('should be less than WARN', function() { + assertThat(info).isLessThanOrEqualTo([ + levels.WARN, + levels.ERROR, + levels.FATAL, + levels.OFF + ]); + assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]); + }); + + it('should be greater than DEBUG', function() { + assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]); + assertThat(info).isNotGreaterThanOrEqualTo([ + levels.WARN, + levels.ERROR, + levels.FATAL, + levels.OFF + ]); + }); + + it('should only be equal to INFO', function() { + assertThat(info).isEqualTo([levels.toLevel("INFO")]); + assertThat(info).isNotEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.WARN, + levels.ERROR, + levels.FATAL, + levels.OFF + ]); + }); + }); + + describe('WARN', function() { + var warn = levels.WARN; + + it('should be less than ERROR', function() { + assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]); + assertThat(warn).isNotLessThanOrEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO + ]); + }); + + it('should be greater than INFO', function() { + assertThat(warn).isGreaterThanOrEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO + ]); + assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]); + }); + + it('should only be equal to WARN', function() { + assertThat(warn).isEqualTo([levels.toLevel("WARN")]); + assertThat(warn).isNotEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.ERROR, + levels.FATAL, + levels.OFF + ]); + }); + }); + + describe('ERROR', function() { + var error = levels.ERROR; + + it('should be less than FATAL', function() { + assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]); + assertThat(error).isNotLessThanOrEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN + ]); + }); + + it('should be greater than WARN', function() { + 
assertThat(error).isGreaterThanOrEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN + ]); + assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]); + }); + + it('should only be equal to ERROR', function() { + assertThat(error).isEqualTo([levels.toLevel("ERROR")]); + assertThat(error).isNotEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN, + levels.FATAL, + levels.OFF + ]); + }); + }); + + describe('FATAL', function() { + var fatal = levels.FATAL; + + it('should be less than OFF', function() { + assertThat(fatal).isLessThanOrEqualTo([levels.OFF]); + assertThat(fatal).isNotLessThanOrEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN, + levels.ERROR + ]); + }); + + it('should be greater than ERROR', function() { + assertThat(fatal).isGreaterThanOrEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN, + levels.ERROR + ]); + assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]); + }); + + it('should only be equal to FATAL', function() { + assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]); + assertThat(fatal).isNotEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN, + levels.ERROR, + levels.OFF + ]); + }); + }); + + describe('OFF', function() { + var off = levels.OFF; + + it('should not be less than anything', function() { + assertThat(off).isNotLessThanOrEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN, + levels.ERROR, + levels.FATAL + ]); + }); + + it('should be greater than everything', function() { + assertThat(off).isGreaterThanOrEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN, + levels.ERROR, + levels.FATAL + ]); + }); + + it('should only be equal to OFF', function() { + assertThat(off).isEqualTo([levels.toLevel("OFF")]); + assertThat(off).isNotEqualTo([ + levels.ALL, + levels.TRACE, + levels.DEBUG, + levels.INFO, + levels.WARN, + levels.ERROR, + levels.FATAL + ]); + }); + }); + + describe('isGreaterThanOrEqualTo', function() { + var info = levels.INFO; + it('should handle string arguments', function() { assertThat(info).isGreaterThanOrEqualTo(["all", "trace", "debug"]); assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']); - } - }, - 'isLessThanOrEqualTo': { - topic: levels.INFO, - 'should handle string arguments': function(info) { + }); + }); + + describe('isLessThanOrEqualTo', function() { + var info = levels.INFO; + it('should handle string arguments', function() { assertThat(info).isNotLessThanOrEqualTo(["all", "trace", "debug"]); assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']); - } - }, - 'isEqualTo': { - topic: levels.INFO, - 'should handle string arguments': function(info) { + }); + }); + + describe('isEqualTo', function() { + var info = levels.INFO; + it('should handle string arguments', function() { assertThat(info).isEqualTo(["info", "INFO", "iNfO"]); - } - }, - 'toLevel': { - 'with lowercase argument': { - topic: levels.toLevel("debug"), - 'should take the string and return the corresponding level': function(level) { - assert.equal(level, levels.DEBUG); - } - }, - 'with uppercase argument': { - topic: levels.toLevel("DEBUG"), - 'should take the string and return the corresponding level': function(level) { - assert.equal(level, levels.DEBUG); - } - }, - 'with varying case': { - topic: levels.toLevel("DeBuG"), - 'should take the string and return the corresponding 
level': function(level) { - assert.equal(level, levels.DEBUG); - } - }, - 'with unrecognised argument': { - topic: levels.toLevel("cheese"), - 'should return undefined': function(level) { - assert.isUndefined(level); - } - }, - 'with unrecognised argument and default value': { - topic: levels.toLevel("cheese", levels.DEBUG), - 'should return default value': function(level) { - assert.equal(level, levels.DEBUG); - } - } - } -}).export(module); + }); + }); + + describe('toLevel', function() { + it('should ignore the case of arguments', function() { + levels.toLevel("debug").should.eql(levels.DEBUG); + levels.toLevel("DEBUG").should.eql(levels.DEBUG); + levels.toLevel("DeBuG").should.eql(levels.DEBUG); + }); + + it('should return undefined when argument is not recognised', function() { + should.not.exist(levels.toLevel("cheese")); + }); + + it('should return the default value if argument is not recognised', function() { + levels.toLevel("cheese", levels.DEBUG).should.eql(levels.DEBUG); + }); + }); + +}); From eb875b6d98b6b8c5c6a173532f4c1650d3640454 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 21 Aug 2013 08:04:26 +1000 Subject: [PATCH 07/53] mocha tests for new log4js --- lib/log4js.js | 252 +++++++++++++++++--------------------- test/log4js-test.js | 291 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 404 insertions(+), 139 deletions(-) create mode 100644 test/log4js-test.js diff --git a/lib/log4js.js b/lib/log4js.js index 22ac913d..c781a307 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -44,25 +44,21 @@ * Website: http://log4js.berlios.de */ var debug = require('./debug')('core') -, weak = require('weak') , fs = require('fs') -, path = require('path') , util = require('util') , layouts = require('./layouts') , levels = require('./levels') -, LoggingEvent = require('./logger').LoggingEvent -, Logger = require('./logger').Logger -, loggerRefs = [] -, categoryLevels = {} -, globalLogLevel = null -, ALL_CATEGORIES = '[all]' +, Logger = require('./logger') , appenders = {} +, categories = {} , appenderMakers = {} , defaultConfig = { - appenders: [ - { type: "console" } - ], - replaceConsole: false + appenders: { + console: { type: "console" } + }, + categories: { + default: { level: levels.DEBUG, appenders: [ "console" ] } + } }; /** @@ -72,100 +68,9 @@ var debug = require('./debug')('core') * @static */ function getLogger (categoryName) { - var level, logger, ref; - - // Use default logger if categoryName is not specified or invalid - if (typeof categoryName !== "string") { - categoryName = Logger.DEFAULT_CATEGORY; - } - - level = categoryLevels[categoryName]; - - if (globalLogLevel) { - level = globalLogLevel; - } - - debug("getLogger(" + categoryName + ") - level is " + level); - logger = new Logger(categoryName, level || null, dispatch); - ref = weak(logger); - loggerRefs.push(ref); - - return logger; -} - -/** - * args are appender, then zero or more categories - */ -function addAppender () { - var args = Array.prototype.slice.call(arguments); - var appender = args.shift(); - if (args.length === 0 || args[0] === undefined) { - args = [ ALL_CATEGORIES ]; - } - //argument may already be an array - if (Array.isArray(args[0])) { - args = args[0]; - } - - args.forEach(function(category) { - addAppenderToCategory(appender, category); - }); -} - -function addAppenderToCategory(appender, category) { - debug("adding appender " + appender + " to category " + category); - if (!appenders[category]) { - appenders[category] = []; - } - appenders[category].push(appender); -} - 
-function clearAppenders () { - debug("clearing appenders"); - appenders = {}; -} - -function configureAppenders(appenderList, options) { - clearAppenders(); - if (appenderList) { - appenderList.forEach(function(appenderConfig) { - loadAppender(appenderConfig.type); - var appender; - appenderConfig.makers = appenderMakers; - try { - appender = appenderMakers[appenderConfig.type](appenderConfig, options); - addAppender(appender, appenderConfig.category); - } catch(e) { - throw new Error("log4js configuration problem for " + util.inspect(appenderConfig), e); - } - }); - } -} + debug("getLogger(" + categoryName + ")"); -function configureLevels(levels) { - categoryLevels = levels || {}; -} - -function setGlobalLogLevel(level) { - //Logger.prototype.level = levels.toLevel(level, levels.TRACE); - globalLogLevel = level; - var workingRefs = []; - loggerRefs.forEach(function(logger) { - if (logger.setLevel) { - workingRefs.push(logger); - logger.setLevel(level); - } - }); - loggerRefs = workingRefs; -} - -/** - * Get the default logger instance. - * @return {Logger} instance of default logger - * @static - */ -function getDefaultLogger () { - return getLogger(Logger.DEFAULT_CATEGORY); + return new Logger(dispatch, categoryName || 'default'); } /** @@ -174,25 +79,17 @@ function getDefaultLogger () { */ function dispatch(event) { debug("event is " + util.inspect(event)); - debug("appenders is " + util.inspect(appenders)); - if (appenders[event.categoryName]) { - debug("appender defined for " + event.categoryName); - dispatchToCategory(event.categoryName, event); - } + var category = categories[event.category] || categories.default; + debug("category.level[" + category.level + "] <= " + event.level + " ? " + category.level.isLessThanOrEqualTo(event.level)); - if (appenders[ALL_CATEGORIES]) { - debug("appender defined for " + ALL_CATEGORIES); - dispatchToCategory(ALL_CATEGORIES, event); + if (category.level.isLessThanOrEqualTo(event.level)) { + category.appenders.forEach(function(appender) { + appenders[appender](event); + }); } } -function dispatchToCategory(category, event) { - appenders[category].forEach(function(appender) { - debug("Sending " + util.inspect(event) + " to appender " + appender); - appender(event); - }); -} - +/* var configState = {}; function loadConfigurationFile(filename) { @@ -251,8 +148,34 @@ function initReloadConfiguration(filename, options) { configState.lastMTime = getMTime(filename); configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000); } +*/ + +function load(file) { + return JSON.parse(fs.readFileSync(file, "utf-8")); +} -function configure(configurationFileOrObject, options) { +function configure(configurationFileOrObject) { + var filename, config = configurationFileOrObject || process.env.LOG4JS_CONFIG; + + if (!config || !(typeof config === 'string' || typeof config === 'object')) { + throw new Error("You must specify configuration as an object or a filename."); + } + + if (typeof config === 'string') { + filename = config; + config = load(filename); + } + + if (!config.appenders || !Object.keys(config.appenders).length) { + throw new Error("You must specify at least one appender."); + } + + configureAppenders(config.appenders); + + validateCategories(config.categories); + categories = config.categories; + +/* var config = configurationFileOrObject; config = config || process.env.LOG4JS_CONFIG; options = options || {}; @@ -270,8 +193,70 @@ function configure(configurationFileOrObject, options) { } } configureOnceOff(config, options); +*/ +} 
+ +function validateCategories(cats) { + if (!cats || !cats.default) { + throw new Error("You must specify an appender for the default category"); + } + + Object.keys(cats).forEach(function(categoryName) { + var category = cats[categoryName], inputLevel = category.level; + if (!category.level) { + throw new Error("You must specify a level for category '" + categoryName + "'."); + } + category.level = levels.toLevel(inputLevel); + if (!category.level) { + throw new Error("Level '" + inputLevel + "' is not valid for category '" + categoryName + "'. Acceptable values are: " + levels.levels.join(', ') + "."); + } + + if (!category.appenders || !category.appenders.length) { + throw new Error("You must specify an appender for category '" + categoryName + "'."); + } + + category.appenders.forEach(function(appender) { + if (!appenders[appender]) { + throw new Error("Appender '" + appender + "' for category '" + categoryName + "' does not exist. Known appenders are: " + Object.keys(appenders).join(', ') + "."); + } + }); + }); +} + +function clearAppenders () { + debug("clearing appenders"); + appenders = {}; } +function configureAppenders(appenderMap) { + clearAppenders(); + Object.keys(appenderMap).forEach(function(appenderName) { + var appender, appenderConfig = appenderMap[appenderName]; + loadAppender(appenderConfig.type); + appenderConfig.makers = appenderMakers; + try { + appenders[appenderName] = appenderMakers[appenderConfig.type](appenderConfig); + } catch(e) { + throw new Error("log4js configuration problem for appender '" + appenderName + "'. Error was " + e.stack); + } + }); +} + +function loadAppender(appender) { + var appenderModule; + try { + appenderModule = require('./appenders/' + appender); + } catch (e) { + try { + appenderModule = require(appender); + } catch (err) { + throw new Error("Could not load appender of type '" + appender + "'."); + } + } + appenderMakers[appender] = appenderModule.configure.bind(appenderModule); +} + +/* var originalConsoleFunctions = { log: console.log, debug: console.debug, @@ -298,40 +283,29 @@ function restoreConsole() { }); } -function loadAppender(appender) { - var appenderModule; - try { - appenderModule = require('./appenders/' + appender); - } catch (e) { - appenderModule = require(appender); - } - module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule); - appenderMakers[appender] = appenderModule.configure.bind(appenderModule); -} - +*/ module.exports = { getLogger: getLogger, - getDefaultLogger: getDefaultLogger, - dispatch: dispatch, - +/* addAppender: addAppender, loadAppender: loadAppender, clearAppenders: clearAppenders, +*/ configure: configure, - +/* replaceConsole: replaceConsole, restoreConsole: restoreConsole, levels: levels, - setGlobalLogLevel: setGlobalLogLevel, layouts: layouts, appenders: {}, appenderMakers: appenderMakers, connectLogger: require('./connect-logger').connectLogger +*/ }; //set ourselves up -debug("Starting configuration"); -configure(); +//debug("Starting configuration"); +//configure(); diff --git a/test/log4js-test.js b/test/log4js-test.js new file mode 100644 index 00000000..40742c90 --- /dev/null +++ b/test/log4js-test.js @@ -0,0 +1,291 @@ +"use strict"; +var should = require('should') +, fs = require('fs') +, sandbox = require('sandboxed-module') +, log4js = require('../lib/log4js'); + +describe('../lib/log4js', function() { + describe('#getLogger', function() { + it('should return a Logger', function() { + log4js.getLogger().should.have.property('debug').be.a('function'); + 
log4js.getLogger().should.have.property('info').be.a('function'); + log4js.getLogger().should.have.property('error').be.a('function'); + }); + }); + + describe('#configure', function() { + it('should require an object or a filename', function() { + [ + undefined, + null, + true, + 42, + function() {} + ].forEach(function(arg) { + (function() { log4js.configure(arg); }).should.throw( + "You must specify configuration as an object or a filename." + ); + }); + }); + + it('should complain if the file cannot be found', function() { + (function() { log4js.configure("pants"); }).should.throw( + "ENOENT, no such file or directory 'pants'" + ); + }); + + it('should pick up the configuration filename from env.LOG4JS_CONFIG', function() { + process.env.LOG4JS_CONFIG = 'made-up-file'; + (function() { log4js.configure(); }).should.throw( + "ENOENT, no such file or directory 'made-up-file'" + ); + process.env.LOG4JS_CONFIG = null; + }); + + it('should complain if the config does not specify any appenders', function() { + + (function() { log4js.configure({}); }).should.throw( + "You must specify at least one appender." + ); + + (function() { log4js.configure({ appenders: {} }); }).should.throw( + "You must specify at least one appender." + ); + + }); + + it('should complain if the config does not specify an appender for the default category', function() { + + (function() { + log4js.configure( + { + appenders: { + "console": { type: "console" } + }, + categories: {} + } + ); + }).should.throw( + "You must specify an appender for the default category" + ); + + (function() { + log4js.configure({ + appenders: { + "console": { type: "console" } + }, + categories: { + "cheese": { level: "DEBUG", appenders: [ "console" ] } + } + }); + }).should.throw( + "You must specify an appender for the default category" + ); + + }); + + it('should complain if a category does not specify level or appenders', function() { + (function() { + log4js.configure( + { appenders: { "console": { type: "console" } }, + categories: { + "default": { thing: "thing" } + } + } + ); + }).should.throw( + "You must specify a level for category 'default'." + ); + + (function() { + log4js.configure( + { appenders: { "console": { type: "console" } }, + categories: { + "default": { level: "DEBUG" } + } + } + ); + }).should.throw( + "You must specify an appender for category 'default'." + ); + }); + + it('should complain if a category specifies a level that does not exist', function() { + (function() { + log4js.configure( + { appenders: { "console": { type: "console" }}, + categories: { + "default": { level: "PICKLES" } + } + } + ); + }).should.throw( + "Level 'PICKLES' is not valid for category 'default'. " + + "Acceptable values are: OFF, TRACE, DEBUG, INFO, WARN, ERROR, FATAL." + ); + }); + + it('should complain if a category specifies an appender that does not exist', function() { + (function() { + log4js.configure( + { appenders: { "console": { type: "console" }}, + categories: { + "default": { level: "DEBUG", appenders: [ "cheese" ] } + } + } + ); + }).should.throw( + "Appender 'cheese' for category 'default' does not exist. Known appenders are: console." 
+    );
+  });
+
+  before(function(done) {
+    fs.unlink("test.log", function (err) { done(); });
+  });
+
+  it('should set up the included appenders', function(done) {
+    log4js.configure({
+      appenders: {
+        "file": { type: "file", filename: "test.log" }
+      },
+      categories: {
+        default: { level: "DEBUG", appenders: [ "file" ] }
+      }
+    });
+    log4js.getLogger('test').debug("cheese");
+
+    setTimeout(function() {
+      fs.readFile("test.log", "utf-8", function(err, contents) {
+        contents.should.include("cheese");
+        done(err);
+      });
+    }, 50);
+  });
+
+  after(function(done) {
+    fs.unlink("test.log", function (err) { done(); });
+  });
+
+  it('should set up third-party appenders', function() {
+    var events = [], log4js_sandbox = sandbox.require(
+      '../lib/log4js',
+      {
+        requires: {
+          'cheese': {
+            configure: function() {
+              return function(evt) { events.push(evt); };
+            }
+          }
+        }
+      }
+    );
+    log4js_sandbox.configure({
+      appenders: {
+        "thing": { type: "cheese" }
+      },
+      categories: {
+        default: { level: "DEBUG", appenders: [ "thing" ] }
+      }
+    });
+    log4js_sandbox.getLogger().info("edam");
+
+    events.should.have.length(1);
+    events[0].data[0].should.eql("edam");
+
+  });
+
+  it('should complain about unknown appenders', function() {
+    (function() {
+      log4js.configure({
+        appenders: {
+          "thing": { type: "madeupappender" }
+        },
+        categories: {
+          default: { level: "DEBUG", appenders: [ "thing" ] }
+        }
+      });
+    }).should.throw(
+      "Could not load appender of type 'madeupappender'."
+    );
+  });
+
+  it('should read config from a file', function() {
+    var events = [], log4js_sandbox = sandbox.require(
+      '../lib/log4js',
+      { requires:
+        {
+          'cheese': {
+            configure: function() {
+              return function(event) { events.push(event); };
+            }
+          }
+        }
+      }
+    );
+
+    log4js_sandbox.configure(__dirname + "/with-cheese.json");
+    log4js_sandbox.getLogger().debug("gouda");
+
+    events.should.have.length(1);
+    events[0].data[0].should.eql("gouda");
+  });
+
+  it('should set up log levels for categories', function() {
+    var events = []
+    , noisyLogger
+    , log4js_sandbox = sandbox.require(
+      '../lib/log4js',
+      { requires:
+        {
+          'cheese': {
+            configure: function() {
+              return function(event) { events.push(event); };
+            }
+          }
+        }
+      }
+    );
+
+    log4js_sandbox.configure(__dirname + "/with-cheese.json");
+    noisyLogger = log4js_sandbox.getLogger("noisy");
+    noisyLogger.debug("pow");
+    noisyLogger.info("crash");
+    noisyLogger.warn("bang");
+    noisyLogger.error("boom");
+    noisyLogger.fatal("aargh");
+
+    events.should.have.length(2);
+    events[0].data[0].should.eql("boom");
+    events[1].data[0].should.eql("aargh");
+
+  });
+
+  it('should have a default log level for all categories', function() {
+    var events = []
+    , log4js_sandbox = sandbox.require(
+      '../lib/log4js',
+      { requires:
+        {
+          'cheese': {
+            configure: function() {
+              return function(event) { events.push(event); };
+            }
+          }
+        }
+      }
+    );
+
+    //with-cheese.json only specifies categories noisy and default
+    //unspecified categories should use the default category config
+    log4js_sandbox.configure(__dirname + "/with-cheese.json");
+    log4js_sandbox.getLogger("surprise").trace("not seen");
+    log4js_sandbox.getLogger("surprise").info("should be seen");
+
+    events.should.have.length(1);
+    events[0].data[0].should.eql("should be seen");
+
+  });
+
+  it('should reload configuration if specified');
+  });
+});

From a0d03734807590d9c0fd96938454c19c15b5f3eb Mon Sep 17 00:00:00 2001
From: Gareth Jones
Date: Wed, 21 Aug 2013 08:05:28 +1000
Subject: [PATCH 08/53] convert level string to Level

---
 lib/logger.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/lib/logger.js b/lib/logger.js
index 7f85956f..06db5845 100644
--- a/lib/logger.js
+++ b/lib/logger.js
@@ -1,5 +1,6 @@
 "use strict";
 var debug = require('./debug')('logger')
+, levels = require('./levels')
 , util = require('util');
 
 module.exports = function Logger(dispatch, category) {
@@ -44,6 +45,6 @@ function LoggingEvent (category, level, data) {
   this.startTime = new Date();
   this.category = category;
   this.data = data;
-  this.level = level;
+  this.level = levels.toLevel(level);
 }

From ab1c81a61c290adaa44e1607bc27e237f6b0b136 Mon Sep 17 00:00:00 2001
From: Gareth Jones
Date: Wed, 21 Aug 2013 08:07:47 +1000
Subject: [PATCH 09/53] tests covered by test/log4js-test.js

---
 test/configuration-test.js | 134 -------------------------------------
 1 file changed, 134 deletions(-)
 delete mode 100644 test/configuration-test.js

diff --git a/test/configuration-test.js b/test/configuration-test.js
deleted file mode
index e198de90..00000000
--- a/test/configuration-test.js
+++ /dev/null
@@ -1,134 +0,0 @@
-"use strict";
-var assert = require('assert')
-, vows = require('vows')
-, sandbox = require('sandboxed-module');
-
-function makeTestAppender() {
-  return {
-    configure: function(config, options) {
-      this.configureCalled = true;
-      this.config = config;
-      this.options = options;
-      return this.appender();
-    },
-    appender: function() {
-      var self = this;
-      return function(logEvt) { self.logEvt = logEvt; };
-    }
-  };
-}
-
-vows.describe('log4js configure').addBatch({
-  'appenders': {
-    'when specified by type': {
-      topic: function() {
-        var testAppender = makeTestAppender(),
-        log4js = sandbox.require(
-          '../lib/log4js',
-          {
-            requires: {
-              './appenders/cheese': testAppender
-            }
-          }
-        );
-        log4js.configure(
-          {
-            appenders: [
-              { type: "cheese", flavour: "gouda" }
-            ]
-          },
-          { pants: "yes" }
-        );
-        return testAppender;
-      },
-      'should load appender': function(testAppender) {
-        assert.ok(testAppender.configureCalled);
-      },
-      'should pass config to appender': function(testAppender) {
-        assert.equal(testAppender.config.flavour, 'gouda');
-      },
-      'should pass log4js options to appender': function(testAppender) {
-        assert.equal(testAppender.options.pants, 'yes');
-      }
-    },
-    'when core appender loaded via loadAppender': {
-      topic: function() {
-        var testAppender = makeTestAppender(),
-        log4js = sandbox.require(
-          '../lib/log4js',
-          { requires: { './appenders/cheese': testAppender } }
-        );
-
-        log4js.loadAppender('cheese');
-        return log4js;
-      },
-      'should load appender from ../lib/appenders': function(log4js) {
-        assert.ok(log4js.appenders.cheese);
-      },
-      'should add appender configure function to appenderMakers' : function(log4js) {
-        assert.isFunction(log4js.appenderMakers.cheese);
-      }
-    },
-    'when appender in node_modules loaded via loadAppender': {
-      topic: function() {
-        var testAppender = makeTestAppender(),
-        log4js = sandbox.require(
-          '../lib/log4js',
-          { requires: { 'some/other/external': testAppender } }
-        );
-        log4js.loadAppender('some/other/external');
-        return log4js;
-      },
-      'should load appender via require': function(log4js) {
-        assert.ok(log4js.appenders['some/other/external']);
-      },
-      'should add appender configure function to appenderMakers': function(log4js) {
-        assert.isFunction(log4js.appenderMakers['some/other/external']);
-      }
-    },
-    'when configuration file loaded via LOG4JS_CONFIG environment variable': {
-      topic: function() {
-        process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
-        var fileRead = 0,
-        modulePath =
'some/path/to/mylog4js.json', - pathsChecked = [], - mtime = new Date(), - fakeFS = { - config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ], - levels: { 'a-test' : 'INFO' } }, - readdirSync: function(dir) { - return require('fs').readdirSync(dir); - }, - readFileSync: function (file, encoding) { - fileRead += 1; - assert.isString(file); - assert.equal(file, modulePath); - assert.equal(encoding, 'utf8'); - return JSON.stringify(fakeFS.config); - }, - statSync: function (path) { - pathsChecked.push(path); - if (path === modulePath) { - return { mtime: mtime }; - } else { - throw new Error("no such file"); - } - } - }, - log4js = sandbox.require( - '../lib/log4js', - { - requires: { - 'fs': fakeFS, - } - } - ); - delete process.env.LOG4JS_CONFIG; - return fileRead; - }, - 'should load the specified local configuration file' : function(fileRead) { - assert.equal(fileRead, 1); - } - } - } -}).exportTo(module); From 631ca75e57fa9c483264fca72624eb32b8a47773 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 21 Aug 2013 08:13:50 +1000 Subject: [PATCH 10/53] no longer needed - log4js now throws exceptions instead of accepting invalid config --- test/configureNoLevels-test.js | 173 --------------------------------- 1 file changed, 173 deletions(-) delete mode 100644 test/configureNoLevels-test.js diff --git a/test/configureNoLevels-test.js b/test/configureNoLevels-test.js deleted file mode 100644 index 55bd987b..00000000 --- a/test/configureNoLevels-test.js +++ /dev/null @@ -1,173 +0,0 @@ -"use strict"; -// This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier: -// 1) log4js.configure(), log4js.configure(null), -// log4js.configure({}), log4js.configure() -// all set all loggers levels to trace, even if they were previously set to something else. -// 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo: -// bar}}) leaves previously set logger levels intact. -// - -// Basic set up -var vows = require('vows'); -var assert = require('assert'); -var toLevel = require('../lib/levels').toLevel; - -// uncomment one or other of the following to see progress (or not) while running the tests -// var showProgress = console.log; -var showProgress = function() {}; - - -// Define the array of levels as string to iterate over. -var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal']; - -// setup the configurations we want to test -var configs = { - 'nop': 'nop', // special case where the iterating vows generator will not call log4js.configure - 'is undefined': undefined, - 'is null': null, - 'is empty': {}, - 'has no levels': {foo: 'bar'}, - 'has null levels': {levels: null}, - 'has empty levels': {levels: {}}, - 'has random levels': {levels: {foo: 'bar'}}, - 'has some valid levels': {levels: {A: 'INFO'}} -}; - -// Set up the basic vows batches for this test -var batches = []; - - -function getLoggerName(level) { - return level+'-logger'; -} - -// the common vows top-level context, whether log4js.configure is called or not -// just making sure that the code is common, -// so that there are no spurious errors in the tests themselves. 
-function getTopLevelContext(nop, configToTest, name) { - return { - topic: function() { - var log4js = require('../lib/log4js'); - // create loggers for each level, - // keeping the level in the logger's name for traceability - strLevels.forEach(function(l) { - log4js.getLogger(getLoggerName(l)).setLevel(l); - }); - - if (!nop) { - showProgress('** Configuring log4js with', configToTest); - log4js.configure(configToTest); - } - else { - showProgress('** Not configuring log4js'); - } - return log4js; - } - }; -} - -showProgress('Populating batch object...'); - -function checkForMismatch(topic) { - var er = topic.log4js.levels.toLevel(topic.baseLevel) - .isLessThanOrEqualTo(topic.log4js.levels.toLevel(topic.comparisonLevel)); - - assert.equal( - er, - topic.expectedResult, - 'Mismatch: for setLevel(' + topic.baseLevel + - ') was expecting a comparison with ' + topic.comparisonLevel + - ' to be ' + topic.expectedResult - ); -} - -function checkExpectedResult(topic) { - var result = topic.log4js - .getLogger(getLoggerName(topic.baseLevel)) - .isLevelEnabled(topic.log4js.levels.toLevel(topic.comparisonLevel)); - - assert.equal( - result, - topic.expectedResult, - 'Failed: ' + getLoggerName(topic.baseLevel) + - '.isLevelEnabled( ' + topic.comparisonLevel + ' ) returned ' + result - ); -} - -function setupBaseLevelAndCompareToOtherLevels(baseLevel) { - var baseLevelSubContext = 'and checking the logger whose level was set to '+baseLevel ; - var subContext = { topic: baseLevel }; - batch[context][baseLevelSubContext] = subContext; - - // each logging level has strLevels sub-contexts, - // to exhaustively test all the combinations of - // setLevel(baseLevel) and isLevelEnabled(comparisonLevel) per config - strLevels.forEach(compareToOtherLevels(subContext)); -} - -function compareToOtherLevels(subContext) { - var baseLevel = subContext.topic; - - return function (comparisonLevel) { - var comparisonLevelSubContext = 'with isLevelEnabled('+comparisonLevel+')'; - - // calculate this independently of log4js, but we'll add a vow - // later on to check that we're not mismatched with log4js - var expectedResult = strLevels.indexOf(baseLevel) <= strLevels.indexOf(comparisonLevel); - - // the topic simply gathers all the parameters for the vow - // into an object, to simplify the vow's work. 
- subContext[comparisonLevelSubContext] = { - topic: function(baseLevel, log4js) { - return { - comparisonLevel: comparisonLevel, - baseLevel: baseLevel, - log4js: log4js, - expectedResult: expectedResult - }; - } - }; - - var vow = 'should return '+expectedResult; - subContext[comparisonLevelSubContext][vow] = checkExpectedResult; - - // the extra vow to check the comparison between baseLevel and - // comparisonLevel we performed earlier matches log4js' - // comparison too - var subSubContext = subContext[comparisonLevelSubContext]; - subSubContext['finally checking for comparison mismatch with log4js'] = checkForMismatch; - }; -} - -// Populating the batches programmatically, as there are -// (configs.length x strLevels.length x strLevels.length) = 324 -// possible test combinations -for (var cfg in configs) { - var configToTest = configs[cfg]; - var nop = configToTest === 'nop'; - var context; - if (nop) { - context = 'Setting up loggers with initial levels, then NOT setting a configuration,'; - } - else { - context = 'Setting up loggers with initial levels, then setting a configuration which '+cfg+','; - } - - showProgress('Setting up the vows batch and context for '+context); - // each config to be tested has its own vows batch with a single top-level context - var batch={}; - batch[context]= getTopLevelContext(nop, configToTest, context); - batches.push(batch); - - // each top-level context has strLevels sub-contexts, one per logger - // which has set to a specific level in the top-level context's topic - strLevels.forEach(setupBaseLevelAndCompareToOtherLevels); -} - -showProgress('Running tests'); -var v = vows.describe('log4js.configure(), with or without a "levels" property'); - -batches.forEach(function(batch) {v=v.addBatch(batch);}); - -v.export(module); - From a0a5a3aa9928e79a2c0e338d307512efd8cc68fe Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 08:01:57 +1000 Subject: [PATCH 11/53] moved connect-logger to log4js-connect --- lib/connect-logger.js | 194 ------------------------------- test/connect-logger-test.js | 226 ------------------------------------ 2 files changed, 420 deletions(-) delete mode 100644 lib/connect-logger.js delete mode 100644 test/connect-logger-test.js diff --git a/lib/connect-logger.js b/lib/connect-logger.js deleted file mode 100644 index a441be55..00000000 --- a/lib/connect-logger.js +++ /dev/null @@ -1,194 +0,0 @@ -"use strict"; -var levels = require("./levels"); -var DEFAULT_FORMAT = ':remote-addr - -' + - ' ":method :url HTTP/:http-version"' + - ' :status :content-length ":referrer"' + - ' ":user-agent"'; -/** - * Log requests with the given `options` or a `format` string. - * - * Options: - * - * - `format` Format string, see below for tokens - * - `level` A log4js levels instance. Supports also 'auto' - * - * Tokens: - * - * - `:req[header]` ex: `:req[Accept]` - * - `:res[header]` ex: `:res[Content-Length]` - * - `:http-version` - * - `:response-time` - * - `:remote-addr` - * - `:date` - * - `:method` - * - `:url` - * - `:referrer` - * - `:user-agent` - * - `:status` - * - * @param {String|Function|Object} format or options - * @return {Function} - * @api public - */ - -function getLogger(logger4js, options) { - if ('object' == typeof options) { - options = options || {}; - } else if (options) { - options = { format: options }; - } else { - options = {}; - } - - var thislogger = logger4js - , level = levels.toLevel(options.level, levels.INFO) - , fmt = options.format || DEFAULT_FORMAT - , nolog = options.nolog ? 
createNoLogCondition(options.nolog) : null; - - return function (req, res, next) { - // mount safety - if (req._logging) return next(); - - // nologs - if (nolog && nolog.test(req.originalUrl)) return next(); - if (thislogger.isLevelEnabled(level) || options.level === 'auto') { - - var start = new Date() - , statusCode - , writeHead = res.writeHead - , end = res.end - , url = req.originalUrl; - - // flag as logging - req._logging = true; - - // proxy for statusCode. - res.writeHead = function(code, headers){ - res.writeHead = writeHead; - res.writeHead(code, headers); - res.__statusCode = statusCode = code; - res.__headers = headers || {}; - - //status code response level handling - if(options.level === 'auto'){ - level = levels.INFO; - if(code >= 300) level = levels.WARN; - if(code >= 400) level = levels.ERROR; - } else { - level = levels.toLevel(options.level, levels.INFO); - } - }; - - // proxy end to output a line to the provided logger. - res.end = function(chunk, encoding) { - res.end = end; - res.end(chunk, encoding); - res.responseTime = new Date() - start; - if (thislogger.isLevelEnabled(level)) { - if (typeof fmt === 'function') { - var line = fmt(req, res, function(str){ return format(str, req, res); }); - if (line) thislogger.log(level, line); - } else { - thislogger.log(level, format(fmt, req, res)); - } - } - }; - } - - //ensure next gets always called - next(); - }; -} - -/** - * Return formatted log line. - * - * @param {String} str - * @param {IncomingMessage} req - * @param {ServerResponse} res - * @return {String} - * @api private - */ - -function format(str, req, res) { - return str - .replace(':url', req.originalUrl) - .replace(':method', req.method) - .replace(':status', res.__statusCode || res.statusCode) - .replace(':response-time', res.responseTime) - .replace(':date', new Date().toUTCString()) - .replace(':referrer', req.headers.referer || req.headers.referrer || '') - .replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor) - .replace( - ':remote-addr', - req.socket && - (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress)) - ) - .replace(':user-agent', req.headers['user-agent'] || '') - .replace( - ':content-length', - (res._headers && res._headers['content-length']) || - (res.__headers && res.__headers['Content-Length']) || - '-' - ) - .replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; }) - .replace(/:res\[([^\]]+)\]/g, function(_, field){ - return res._headers ? - (res._headers[field.toLowerCase()] || res.__headers[field]) - : (res.__headers && res.__headers[field]); - }); -} - -/** - * Return RegExp Object about nolog - * - * @param {String} nolog - * @return {RegExp} - * @api private - * - * syntax - * 1. String - * 1.1 "\\.gif" - * NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga - * LOGGING http://example.com/hoge.agif - * 1.2 in "\\.gif|\\.jpg$" - * NOT LOGGING http://example.com/hoge.gif and - * http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga - * LOGGING http://example.com/hoge.agif, - * http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge - * 1.3 in "\\.(gif|jpe?g|png)$" - * NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg - * LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3 - * 2. RegExp - * 2.1 in /\.(gif|jpe?g|png)$/ - * SAME AS 1.3 - * 3. 
Array - * 3.1 ["\\.jpg$", "\\.png", "\\.gif"] - * SAME AS "\\.jpg|\\.png|\\.gif" - */ -function createNoLogCondition(nolog) { - var regexp = null; - - if (nolog) { - if (nolog instanceof RegExp) { - regexp = nolog; - } - - if (typeof nolog === 'string') { - regexp = new RegExp(nolog); - } - - if (Array.isArray(nolog)) { - var regexpsAsStrings = nolog.map( - function convertToStrings(o) { - return o.source ? o.source : o; - } - ); - regexp = new RegExp(regexpsAsStrings.join('|')); - } - } - - return regexp; -} - -exports.connectLogger = getLogger; diff --git a/test/connect-logger-test.js b/test/connect-logger-test.js deleted file mode 100644 index 5a0caa7b..00000000 --- a/test/connect-logger-test.js +++ /dev/null @@ -1,226 +0,0 @@ -/* jshint maxparams:7 */ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, levels = require('../lib/levels'); - -function MockLogger() { - - var that = this; - this.messages = []; - - this.log = function(level, message, exception) { - that.messages.push({ level: level, message: message }); - }; - - this.isLevelEnabled = function(level) { - return level.isGreaterThanOrEqualTo(that.level); - }; - - this.level = levels.TRACE; - -} - -function MockRequest(remoteAddr, method, originalUrl, headers) { - - this.socket = { remoteAddress: remoteAddr }; - this.originalUrl = originalUrl; - this.method = method; - this.httpVersionMajor = '5'; - this.httpVersionMinor = '0'; - this.headers = headers || {}; - - var self = this; - Object.keys(this.headers).forEach(function(key) { - self.headers[key.toLowerCase()] = self.headers[key]; - }); -} - -function MockResponse() { - - this.end = function(chunk, encoding) { - }; - - this.writeHead = function(code, headers) { - }; - -} - -function request(cl, method, url, code, reqHeaders, resHeaders) { - var req = new MockRequest('my.remote.addr', method, url, reqHeaders); - var res = new MockResponse(); - cl(req, res, function() {}); - res.writeHead(code, resHeaders); - res.end('chunk','encoding'); -} - -vows.describe('log4js connect logger').addBatch({ - 'getConnectLoggerModule': { - topic: function() { - var clm = require('../lib/connect-logger'); - return clm; - }, - - 'should return a "connect logger" factory' : function(clm) { - assert.isObject(clm); - }, - - 'take a log4js logger and return a "connect logger"' : { - topic: function(clm) { - var ml = new MockLogger(); - var cl = clm.connectLogger(ml); - return cl; - }, - - 'should return a "connect logger"': function(cl) { - assert.isFunction(cl); - } - }, - - 'log events' : { - topic: function(clm) { - var ml = new MockLogger(); - var cl = clm.connectLogger(ml); - request(cl, 'GET', 'http://url', 200); - return ml.messages; - }, - - 'check message': function(messages) { - assert.isArray(messages); - assert.equal(messages.length, 1); - assert.ok(levels.INFO.isEqualTo(messages[0].level)); - assert.include(messages[0].message, 'GET'); - assert.include(messages[0].message, 'http://url'); - assert.include(messages[0].message, 'my.remote.addr'); - assert.include(messages[0].message, '200'); - } - }, - - 'log events with level below logging level' : { - topic: function(clm) { - var ml = new MockLogger(); - ml.level = levels.FATAL; - var cl = clm.connectLogger(ml); - request(cl, 'GET', 'http://url', 200); - return ml.messages; - }, - - 'check message': function(messages) { - assert.isArray(messages); - assert.isEmpty(messages); - } - }, - - 'log events with non-default level and custom format' : { - topic: function(clm) { - var ml = new MockLogger(); - ml.level = 
levels.INFO; - var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } ); - request(cl, 'GET', 'http://url', 200); - return ml.messages; - }, - - 'check message': function(messages) { - assert.isArray(messages); - assert.equal(messages.length, 1); - assert.ok(levels.INFO.isEqualTo(messages[0].level)); - assert.equal(messages[0].message, 'GET http://url'); - } - }, - - 'logger with options as string': { - topic: function(clm) { - var ml = new MockLogger(); - ml.level = levels.INFO; - var cl = clm.connectLogger(ml, ':method :url'); - request(cl, 'POST', 'http://meh', 200); - return ml.messages; - }, - 'should use the passed in format': function(messages) { - assert.equal(messages[0].message, 'POST http://meh'); - } - }, - - 'auto log levels': { - topic: function(clm) { - var ml = new MockLogger(); - ml.level = levels.INFO; - var cl = clm.connectLogger(ml, { level: 'auto', format: ':method :url' }); - request(cl, 'GET', 'http://meh', 200); - request(cl, 'GET', 'http://meh', 201); - request(cl, 'GET', 'http://meh', 302); - request(cl, 'GET', 'http://meh', 404); - request(cl, 'GET', 'http://meh', 500); - return ml.messages; - }, - - 'should use INFO for 2xx': function(messages) { - assert.ok(levels.INFO.isEqualTo(messages[0].level)); - assert.ok(levels.INFO.isEqualTo(messages[1].level)); - }, - - 'should use WARN for 3xx': function(messages) { - assert.ok(levels.WARN.isEqualTo(messages[2].level)); - }, - - 'should use ERROR for 4xx': function(messages) { - assert.ok(levels.ERROR.isEqualTo(messages[3].level)); - }, - - 'should use ERROR for 5xx': function(messages) { - assert.ok(levels.ERROR.isEqualTo(messages[4].level)); - } - }, - - 'format using a function': { - topic: function(clm) { - var ml = new MockLogger(); - ml.level = levels.INFO; - var cl = clm.connectLogger(ml, function(req, res, formatFn) { return "I was called"; }); - request(cl, 'GET', 'http://blah', 200); - return ml.messages; - }, - - 'should call the format function': function(messages) { - assert.equal(messages[0].message, 'I was called'); - } - }, - - 'format that includes request headers': { - topic: function(clm) { - var ml = new MockLogger(); - ml.level = levels.INFO; - var cl = clm.connectLogger(ml, ':req[Content-Type]'); - request( - cl, - 'GET', 'http://blah', 200, - { 'Content-Type': 'application/json' } - ); - return ml.messages; - }, - 'should output the request header': function(messages) { - assert.equal(messages[0].message, 'application/json'); - } - }, - - 'format that includes response headers': { - topic: function(clm) { - var ml = new MockLogger(); - ml.level = levels.INFO; - var cl = clm.connectLogger(ml, ':res[Content-Type]'); - request( - cl, - 'GET', 'http://blah', 200, - null, - { 'Content-Type': 'application/cheese' } - ); - return ml.messages; - }, - - 'should output the response header': function(messages) { - assert.equal(messages[0].message, 'application/cheese'); - } - } - - } -}).export(module); From 7bad76d8ec4ac4941269b4e065a1475080e496a4 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 08:03:22 +1000 Subject: [PATCH 12/53] no longer needed --- lib/log4js.json | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 lib/log4js.json diff --git a/lib/log4js.json b/lib/log4js.json deleted file mode 100644 index 7b6d3e7d..00000000 --- a/lib/log4js.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "appenders": [ - { - "type": "console" - } - ] -} \ No newline at end of file From c3d3a8c3636f7340a25136bce6858e675f300570 Mon Sep 17 00:00:00 2001 From: Gareth Jones 
Date: Thu, 22 Aug 2013 08:11:48 +1000 Subject: [PATCH 13/53] moved gelf appender to log4js-gelf --- lib/appenders/gelf.js | 141 --------------------- test/gelfAppender-test.js | 259 -------------------------------------- 2 files changed, 400 deletions(-) delete mode 100644 lib/appenders/gelf.js delete mode 100644 test/gelfAppender-test.js diff --git a/lib/appenders/gelf.js b/lib/appenders/gelf.js deleted file mode 100644 index 3b32915e..00000000 --- a/lib/appenders/gelf.js +++ /dev/null @@ -1,141 +0,0 @@ -"use strict"; -var zlib = require('zlib'); -var layouts = require('../layouts'); -var levels = require('../levels'); -var dgram = require('dgram'); -var util = require('util'); -var debug = require('../debug')('GELF Appender'); - -var LOG_EMERG=0; // system is unusable -var LOG_ALERT=1; // action must be taken immediately -var LOG_CRIT=2; // critical conditions -var LOG_ERR=3; // error conditions -var LOG_ERROR=3; // because people WILL typo -var LOG_WARNING=4; // warning conditions -var LOG_NOTICE=5; // normal, but significant, condition -var LOG_INFO=6; // informational message -var LOG_DEBUG=7; // debug-level message - -var levelMapping = {}; -levelMapping[levels.ALL] = LOG_DEBUG; -levelMapping[levels.TRACE] = LOG_DEBUG; -levelMapping[levels.DEBUG] = LOG_DEBUG; -levelMapping[levels.INFO] = LOG_INFO; -levelMapping[levels.WARN] = LOG_WARNING; -levelMapping[levels.ERROR] = LOG_ERR; -levelMapping[levels.FATAL] = LOG_CRIT; - -/** - * GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog - * - * @param layout a function that takes a logevent and returns a string (defaults to none). - * @param host - host to which to send logs (default:localhost) - * @param port - port at which to send logs to (default:12201) - * @param hostname - hostname of the current host (default:os hostname) - * @param facility - facility to log to (default:nodejs-server) - */ -function gelfAppender (layout, host, port, hostname, facility) { - var config, customFields; - if (typeof(host) === 'object') { - config = host; - host = config.host; - port = config.port; - hostname = config.hostname; - facility = config.facility; - customFields = config.customFields; - } - - host = host || 'localhost'; - port = port || 12201; - hostname = hostname || require('os').hostname(); - facility = facility || 'nodejs-server'; - layout = layout || layouts.messagePassThroughLayout; - - var defaultCustomFields = customFields || {}; - - var client = dgram.createSocket("udp4"); - - process.on('exit', function() { - if (client) client.close(); - }); - - /** - * Add custom fields (start with underscore ) - * - if the first object passed to the logger contains 'GELF' field, - * copy the underscore fields to the message - * @param loggingEvent - * @param msg - */ - function addCustomFields(loggingEvent, msg){ - - /* append defaultCustomFields firsts */ - Object.keys(defaultCustomFields).forEach(function(key) { - // skip _id field for graylog2, skip keys not starts with UNDERSCORE - if (key.match(/^_/) && key !== "_id") { - msg[key] = defaultCustomFields[key]; - } - }); - - /* append custom fields per message */ - var data = loggingEvent.data; - if (!Array.isArray(data) || data.length === 0) return; - var firstData = data[0]; - - if (!firstData.GELF) return; // identify with GELF field defined - Object.keys(firstData).forEach(function(key) { - // skip _id field for graylog2, skip keys not starts with UNDERSCORE - if (key.match(/^_/) || key !== "_id") { - msg[key] = firstData[key]; - } - }); - - /* the custom 
field object should be removed, so it will not be looged by the later appenders */ - loggingEvent.data.shift(); - } - - function preparePacket(loggingEvent) { - var msg = {}; - addCustomFields(loggingEvent, msg); - msg.full_message = layout(loggingEvent); - msg.short_message = msg.full_message; - - msg.version="1.0"; - msg.timestamp = msg.timestamp || new Date().getTime() / 1000 >> 0; - msg.host = hostname; - msg.level = levelMapping[loggingEvent.level || levels.DEBUG]; - msg.facility = facility; - return msg; - } - - function sendPacket(packet) { - try { - client.send(packet, 0, packet.length, port, host); - } catch(e) {} - } - - return function(loggingEvent) { - var message = preparePacket(loggingEvent); - zlib.gzip(new Buffer(JSON.stringify(message)), function(err, packet) { - if (err) { - console.error(err.stack); - } else { - if (packet.length > 8192) { - debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending"); - } else { - sendPacket(packet); - } - } - }); - }; -} - -function configure(config) { - var layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - return gelfAppender(layout, config); -} - -exports.appender = gelfAppender; -exports.configure = configure; diff --git a/test/gelfAppender-test.js b/test/gelfAppender-test.js deleted file mode 100644 index 4a1ff58b..00000000 --- a/test/gelfAppender-test.js +++ /dev/null @@ -1,259 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, sandbox = require('sandboxed-module') -, log4js = require('../lib/log4js') -, realLayouts = require('../lib/layouts') -, setupLogging = function(options, category, compressedLength) { - var fakeDgram = { - sent: false, - socket: { - packetLength: 0, - closed: false, - close: function() { - this.closed = true; - }, - send: function(pkt, offset, pktLength, port, host) { - fakeDgram.sent = true; - this.packet = pkt; - this.offset = offset; - this.packetLength = pktLength; - this.port = port; - this.host = host; - } - }, - createSocket: function(type) { - this.type = type; - return this.socket; - } - } - , fakeZlib = { - gzip: function(objectToCompress, callback) { - fakeZlib.uncompressed = objectToCompress; - if (this.shouldError) { - callback({ stack: "oh noes" }); - return; - } - - if (compressedLength) { - callback(null, { length: compressedLength }); - } else { - callback(null, "I've been compressed"); - } - } - } - , exitHandler - , fakeConsole = { - error: function(message) { - this.message = message; - } - } - , fakeLayouts = { - layout: function(type, options) { - this.type = type; - this.options = options; - return realLayouts.messagePassThroughLayout; - }, - messagePassThroughLayout: realLayouts.messagePassThroughLayout - } - , appender = sandbox.require('../lib/appenders/gelf', { - requires: { - dgram: fakeDgram, - zlib: fakeZlib, - '../layouts': fakeLayouts - }, - globals: { - process: { - on: function(evt, handler) { - if (evt === 'exit') { - exitHandler = handler; - } - } - }, - console: fakeConsole - } - }); - - log4js.clearAppenders(); - log4js.addAppender(appender.configure(options || {}), category || "gelf-test"); - return { - dgram: fakeDgram, - compress: fakeZlib, - exitHandler: exitHandler, - console: fakeConsole, - layouts: fakeLayouts, - logger: log4js.getLogger(category || "gelf-test") - }; -}; - -vows.describe('log4js gelfAppender').addBatch({ - - 'with default gelfAppender settings': { - topic: function() { - var setup = setupLogging(); - setup.logger.info("This is a test"); - 
return setup; - }, - 'the dgram packet': { - topic: function(setup) { - return setup.dgram; - }, - 'should be sent via udp to the localhost gelf server': function(dgram) { - assert.equal(dgram.type, "udp4"); - assert.equal(dgram.socket.host, "localhost"); - assert.equal(dgram.socket.port, 12201); - assert.equal(dgram.socket.offset, 0); - assert.ok(dgram.socket.packetLength > 0, "Received blank message"); - }, - 'should be compressed': function(dgram) { - assert.equal(dgram.socket.packet, "I've been compressed"); - } - }, - 'the uncompressed log message': { - topic: function(setup) { - var message = JSON.parse(setup.compress.uncompressed); - return message; - }, - 'should be in the gelf format': function(message) { - assert.equal(message.version, '1.0'); - assert.equal(message.host, require('os').hostname()); - assert.equal(message.level, 6); //INFO - assert.equal(message.facility, 'nodejs-server'); - assert.equal(message.full_message, message.short_message); - assert.equal(message.full_message, 'This is a test'); - } - } - }, - 'with a message longer than 8k': { - topic: function() { - var setup = setupLogging(undefined, undefined, 10240); - setup.logger.info("Blah."); - return setup; - }, - 'the dgram packet': { - topic: function(setup) { - return setup.dgram; - }, - 'should not be sent': function(dgram) { - assert.equal(dgram.sent, false); - } - } - }, - 'with non-default options': { - topic: function() { - var setup = setupLogging({ - host: 'somewhere', - port: 12345, - hostname: 'cheese', - facility: 'nonsense' - }); - setup.logger.debug("Just testing."); - return setup; - }, - 'the dgram packet': { - topic: function(setup) { - return setup.dgram; - }, - 'should pick up the options': function(dgram) { - assert.equal(dgram.socket.host, 'somewhere'); - assert.equal(dgram.socket.port, 12345); - } - }, - 'the uncompressed packet': { - topic: function(setup) { - var message = JSON.parse(setup.compress.uncompressed); - return message; - }, - 'should pick up the options': function(message) { - assert.equal(message.host, 'cheese'); - assert.equal(message.facility, 'nonsense'); - } - } - }, - - 'on process.exit': { - topic: function() { - var setup = setupLogging(); - setup.exitHandler(); - return setup; - }, - 'should close open sockets': function(setup) { - assert.isTrue(setup.dgram.socket.closed); - } - }, - - 'on zlib error': { - topic: function() { - var setup = setupLogging(); - setup.compress.shouldError = true; - setup.logger.info('whatever'); - return setup; - }, - 'should output to console.error': function(setup) { - assert.equal(setup.console.message, 'oh noes'); - } - }, - - 'with layout in configuration': { - topic: function() { - var setup = setupLogging({ - layout: { - type: 'madeuplayout', - earlgrey: 'yes, please' - } - }); - return setup; - }, - 'should pass options to layout': function(setup) { - assert.equal(setup.layouts.type, 'madeuplayout'); - assert.equal(setup.layouts.options.earlgrey, 'yes, please'); - } - }, - - 'with custom fields options': { - topic: function() { - var setup = setupLogging({ - host: 'somewhere', - port: 12345, - hostname: 'cheese', - facility: 'nonsense', - customFields: { - _every1: 'Hello every one', - _every2: 'Hello every two' - } - }); - var myFields = { - GELF: true, - _every2: 'Overwritten!', - _myField: 'This is my field!' 
- }; - setup.logger.debug(myFields, "Just testing."); - return setup; - }, - 'the dgram packet': { - topic: function(setup) { - return setup.dgram; - }, - 'should pick up the options': function(dgram) { - assert.equal(dgram.socket.host, 'somewhere'); - assert.equal(dgram.socket.port, 12345); - } - }, - 'the uncompressed packet': { - topic: function(setup) { - var message = JSON.parse(setup.compress.uncompressed); - return message; - }, - 'should pick up the options': function(message) { - assert.equal(message.host, 'cheese'); - assert.equal(message.facility, 'nonsense'); - assert.equal(message._every1, 'Hello every one'); // the default value - assert.equal(message._every2, 'Overwritten!'); // the overwritten value - assert.equal(message._myField, 'This is my field!'); // the value for this message only - assert.equal(message.short_message, 'Just testing.'); // skip the field object - assert.equal(message.full_message, 'Just testing.'); // should be as same as short_message - } - } - } - -}).export(module); From b12a39ac79b5fcb083731705693048dd9a7a6047 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 08:14:44 +1000 Subject: [PATCH 14/53] moved hookio appender to log4js-hookio --- lib/appenders/hookio.js | 76 ---------------- test/hookioAppender-test.js | 176 ------------------------------------ 2 files changed, 252 deletions(-) delete mode 100644 lib/appenders/hookio.js delete mode 100644 test/hookioAppender-test.js diff --git a/lib/appenders/hookio.js b/lib/appenders/hookio.js deleted file mode 100644 index 7821d79b..00000000 --- a/lib/appenders/hookio.js +++ /dev/null @@ -1,76 +0,0 @@ -"use strict"; -var log4js = require('../log4js') -, layouts = require('../layouts') -, Hook = require('hook.io').Hook -, util = require('util'); - -var Logger = function createLogger(options) { - var self = this; - var actualAppender = options.actualAppender; - Hook.call(self, options); - self.on('hook::ready', function hookReady() { - self.on('*::' + options.name + '::log', function log(loggingEvent) { - deserializeLoggingEvent(loggingEvent); - actualAppender(loggingEvent); - }); - }); -}; -util.inherits(Logger, Hook); - -function deserializeLoggingEvent(loggingEvent) { - loggingEvent.startTime = new Date(loggingEvent.startTime); - loggingEvent.level.toString = function levelToString() { - return loggingEvent.level.levelStr; - }; -} - -function initHook(hookioOptions) { - var loggerHook; - if (hookioOptions.mode === 'master') { - // Start the master hook, handling the actual logging - loggerHook = new Logger(hookioOptions); - } else { - // Start a worker, just emitting events for a master - loggerHook = new Hook(hookioOptions); - } - loggerHook.start(); - return loggerHook; -} - -function getBufferedHook(hook, eventName) { - var hookBuffer = []; - var hookReady = false; - hook.on('hook::ready', function emptyBuffer() { - hookBuffer.forEach(function logBufferItem(loggingEvent) { - hook.emit(eventName, loggingEvent); - }); - hookReady = true; - }); - - return function log(loggingEvent) { - if (hookReady) { - hook.emit(eventName, loggingEvent); - } else { - hookBuffer.push(loggingEvent); - } - }; -} - -function createAppender(hookioOptions) { - var loggerHook = initHook(hookioOptions); - var loggerEvent = hookioOptions.name + '::log'; - return getBufferedHook(loggerHook, loggerEvent); -} - -function configure(config) { - var actualAppender; - if (config.appender && config.mode === 'master') { - log4js.loadAppender(config.appender.type); - actualAppender = 
log4js.appenderMakers[config.appender.type](config.appender); - config.actualAppender = actualAppender; - } - return createAppender(config); -} - -exports.appender = createAppender; -exports.configure = configure; diff --git a/test/hookioAppender-test.js b/test/hookioAppender-test.js deleted file mode 100644 index d1c00afd..00000000 --- a/test/hookioAppender-test.js +++ /dev/null @@ -1,176 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, sandbox = require('sandboxed-module'); - -function fancyResultingHookioAppender(hookNotReady) { - var emitHook = !hookNotReady - , result = { ons: {}, emissions: {}, logged: [], configs: [] }; - - var fakeLog4Js = { - appenderMakers: {} - }; - fakeLog4Js.loadAppender = function (appender) { - fakeLog4Js.appenderMakers[appender] = function (config) { - result.actualLoggerConfig = config; - return function log(logEvent) { - result.logged.push(logEvent); - }; - }; - }; - - var fakeHookIo = { Hook: function(config) { result.configs.push(config); } }; - fakeHookIo.Hook.prototype.start = function () { - result.startCalled = true; - }; - fakeHookIo.Hook.prototype.on = function (eventName, functionToExec) { - result.ons[eventName] = { functionToExec: functionToExec }; - if (emitHook && eventName === 'hook::ready') { - functionToExec(); - } - }; - fakeHookIo.Hook.prototype.emit = function (eventName, data) { - result.emissions[eventName] = result.emissions[eventName] || []; - result.emissions[eventName].push({data: data}); - var on = '*::' + eventName; - if (eventName !== 'hook::ready' && result.ons[on]) { - result.ons[on].callingCount = - result.ons[on].callingCount ? result.ons[on].callingCount += 1 : 1; - result.ons[on].functionToExec(data); - } - }; - - return { theResult: result, - theModule: sandbox.require('../lib/appenders/hookio', { - requires: { - '../log4js': fakeLog4Js, - 'hook.io': fakeHookIo - } - }) - }; -} - - -vows.describe('log4js hookioAppender').addBatch({ - 'master': { - topic: function() { - var fancy = fancyResultingHookioAppender(); - var logger = fancy.theModule.configure( - { - name: 'ohno', - mode: 'master', - 'hook-port': 5001, - appender: { type: 'file' } - } - ); - logger( - { - level: { levelStr: 'INFO' }, - data: "ALRIGHTY THEN", - startTime: '2011-10-27T03:53:16.031Z' - } - ); - logger( - { - level: { levelStr: 'DEBUG' }, - data: "OH WOW", - startTime: '2011-10-27T04:53:16.031Z' - } - ); - return fancy.theResult; - }, - - 'should write to the actual appender': function (result) { - assert.isTrue(result.startCalled); - assert.equal(result.configs.length, 1); - assert.equal(result.configs[0]['hook-port'], 5001); - assert.equal(result.logged.length, 2); - assert.equal(result.emissions['ohno::log'].length, 2); - assert.equal(result.ons['*::ohno::log'].callingCount, 2); - }, - - 'data written should be formatted correctly': function (result) { - assert.equal(result.logged[0].level.toString(), 'INFO'); - assert.equal(result.logged[0].data, 'ALRIGHTY THEN'); - assert.isTrue(typeof(result.logged[0].startTime) === 'object'); - assert.equal(result.logged[1].level.toString(), 'DEBUG'); - assert.equal(result.logged[1].data, 'OH WOW'); - assert.isTrue(typeof(result.logged[1].startTime) === 'object'); - }, - - 'the actual logger should get the right config': function (result) { - assert.equal(result.actualLoggerConfig.type, 'file'); - } - }, - 'worker': { - 'should emit logging events to the master': { - topic: function() { - var fancy = fancyResultingHookioAppender(); - var logger = fancy.theModule.configure({ 
- name: 'ohno', - mode: 'worker', - appender: { type: 'file' } - }); - logger({ - level: { levelStr: 'INFO' }, - data: "ALRIGHTY THEN", - startTime: '2011-10-27T03:53:16.031Z' - }); - logger({ - level: { levelStr: 'DEBUG' }, - data: "OH WOW", - startTime: '2011-10-27T04:53:16.031Z' - }); - return fancy.theResult; - }, - - 'should not write to the actual appender': function (result) { - assert.isTrue(result.startCalled); - assert.equal(result.logged.length, 0); - assert.equal(result.emissions['ohno::log'].length, 2); - assert.isUndefined(result.ons['*::ohno::log']); - } - } - }, - 'when hook not ready': { - topic: function() { - var fancy = fancyResultingHookioAppender(true) - , logger = fancy.theModule.configure({ - name: 'ohno', - mode: 'worker' - }); - - logger({ - level: { levelStr: 'INFO' }, - data: "something", - startTime: '2011-10-27T03:45:12.031Z' - }); - return fancy; - }, - 'should buffer the log events': function(fancy) { - assert.isUndefined(fancy.theResult.emissions['ohno::log']); - }, - }, - 'when hook ready': { - topic: function() { - var fancy = fancyResultingHookioAppender(true) - , logger = fancy.theModule.configure({ - name: 'ohno', - mode: 'worker' - }); - - logger({ - level: { levelStr: 'INFO' }, - data: "something", - startTime: '2011-10-27T03:45:12.031Z' - }); - - fancy.theResult.ons['hook::ready'].functionToExec(); - return fancy; - }, - 'should emit the buffered events': function(fancy) { - assert.equal(fancy.theResult.emissions['ohno::log'].length, 1); - } - } - -}).exportTo(module); From bdb56e42562a7cd17d90270e35422e872576e84b Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 08:19:51 +1000 Subject: [PATCH 15/53] moved smtp appender to log4js-smtp --- lib/appenders/smtp.js | 82 -------------- test/smtpAppender-test.js | 233 -------------------------------------- 2 files changed, 315 deletions(-) delete mode 100644 lib/appenders/smtp.js delete mode 100644 test/smtpAppender-test.js diff --git a/lib/appenders/smtp.js b/lib/appenders/smtp.js deleted file mode 100644 index 85accee4..00000000 --- a/lib/appenders/smtp.js +++ /dev/null @@ -1,82 +0,0 @@ -"use strict"; -var layouts = require("../layouts") -, mailer = require("nodemailer") -, os = require('os'); - -/** -* SMTP Appender. Sends logging events using SMTP protocol. -* It can either send an email on each event or group several -* logging events gathered during specified interval. -* -* @param config appender configuration data -* config.sendInterval time between log emails (in seconds), if 0 -* then every event sends an email -* @param layout a function that takes a logevent and returns a string (defaults to basicLayout). 
-*/ -function smtpAppender(config, layout) { - layout = layout || layouts.basicLayout; - var subjectLayout = layouts.messagePassThroughLayout; - var sendInterval = config.sendInterval*1000 || 0; - - var logEventBuffer = []; - var sendTimer; - - function sendBuffer() { - if (logEventBuffer.length > 0) { - - var transport = mailer.createTransport(config.transport, config[config.transport]); - var firstEvent = logEventBuffer[0]; - var body = ""; - while (logEventBuffer.length > 0) { - body += layout(logEventBuffer.shift()) + "\n"; - } - - var msg = { - to: config.recipients, - subject: config.subject || subjectLayout(firstEvent), - text: body, - headers: { "Hostname": os.hostname() } - }; - if (config.sender) { - msg.from = config.sender; - } - transport.sendMail(msg, function(error, success) { - if (error) { - console.error("log4js.smtpAppender - Error happened", error); - } - transport.close(); - }); - } - } - - function scheduleSend() { - if (!sendTimer) { - sendTimer = setTimeout(function() { - sendTimer = null; - sendBuffer(); - }, sendInterval); - } - } - - return function(loggingEvent) { - logEventBuffer.push(loggingEvent); - if (sendInterval > 0) { - scheduleSend(); - } else { - sendBuffer(); - } - }; -} - -function configure(config) { - var layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - return smtpAppender(config, layout); -} - -exports.name = "smtp"; -exports.appender = smtpAppender; -exports.configure = configure; - diff --git a/test/smtpAppender-test.js b/test/smtpAppender-test.js deleted file mode 100644 index 27cc179f..00000000 --- a/test/smtpAppender-test.js +++ /dev/null @@ -1,233 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, log4js = require('../lib/log4js') -, sandbox = require('sandboxed-module') -; - -function setupLogging(category, options) { - var msgs = []; - - var fakeMailer = { - createTransport: function (name, options) { - return { - config: options, - sendMail: function (msg, callback) { - msgs.push(msg); - callback(null, true); - }, - close: function() {} - }; - } - }; - - var fakeLayouts = { - layout: function(type, config) { - this.type = type; - this.config = config; - return log4js.layouts.messagePassThroughLayout; - }, - basicLayout: log4js.layouts.basicLayout, - messagePassThroughLayout: log4js.layouts.messagePassThroughLayout - }; - - var fakeConsole = { - errors: [], - error: function(msg, value) { - this.errors.push({ msg: msg, value: value }); - } - }; - - var smtpModule = sandbox.require('../lib/appenders/smtp', { - requires: { - 'nodemailer': fakeMailer, - '../layouts': fakeLayouts - }, - globals: { - console: fakeConsole - } - }); - - log4js.addAppender(smtpModule.configure(options), category); - - return { - logger: log4js.getLogger(category), - mailer: fakeMailer, - layouts: fakeLayouts, - console: fakeConsole, - results: msgs - }; -} - -function checkMessages (result, sender, subject) { - for (var i = 0; i < result.results.length; ++i) { - assert.equal(result.results[i].from, sender); - assert.equal(result.results[i].to, 'recipient@domain.com'); - assert.equal(result.results[i].subject, subject ? 
subject : 'Log event #' + (i+1)); - assert.ok(new RegExp('.+Log event #' + (i+1) + '\n$').test(result.results[i].text)); - } -} - -log4js.clearAppenders(); -vows.describe('log4js smtpAppender').addBatch({ - 'minimal config': { - topic: function() { - var setup = setupLogging('minimal config', { - recipients: 'recipient@domain.com', - transport: "SMTP", - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setup.logger.info('Log event #1'); - return setup; - }, - 'there should be one message only': function (result) { - assert.equal(result.results.length, 1); - }, - 'message should contain proper data': function (result) { - checkMessages(result); - } - }, - 'fancy config': { - topic: function() { - var setup = setupLogging('fancy config', { - recipients: 'recipient@domain.com', - sender: 'sender@domain.com', - subject: 'This is subject', - transport: "SMTP", - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setup.logger.info('Log event #1'); - return setup; - }, - 'there should be one message only': function (result) { - assert.equal(result.results.length, 1); - }, - 'message should contain proper data': function (result) { - checkMessages(result, 'sender@domain.com', 'This is subject'); - } - }, - 'config with layout': { - topic: function() { - var setup = setupLogging('config with layout', { - layout: { - type: "tester" - } - }); - return setup; - }, - 'should configure layout': function(result) { - assert.equal(result.layouts.type, 'tester'); - } - }, - 'separate email for each event': { - topic: function() { - var self = this; - var setup = setupLogging('separate email for each event', { - recipients: 'recipient@domain.com', - transport: "SMTP", - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setTimeout(function () { - setup.logger.info('Log event #1'); - }, 0); - setTimeout(function () { - setup.logger.info('Log event #2'); - }, 500); - setTimeout(function () { - setup.logger.info('Log event #3'); - }, 1050); - setTimeout(function () { - self.callback(null, setup); - }, 2100); - }, - 'there should be three messages': function (result) { - assert.equal(result.results.length, 3); - }, - 'messages should contain proper data': function (result) { - checkMessages(result); - } - }, - 'multiple events in one email': { - topic: function() { - var self = this; - var setup = setupLogging('multiple events in one email', { - recipients: 'recipient@domain.com', - sendInterval: 1, - transport: "SMTP", - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setTimeout(function () { - setup.logger.info('Log event #1'); - }, 0); - setTimeout(function () { - setup.logger.info('Log event #2'); - }, 500); - setTimeout(function () { - setup.logger.info('Log event #3'); - }, 1050); - setTimeout(function () { - self.callback(null, setup); - }, 2100); - }, - 'there should be two messages': function (result) { - assert.equal(result.results.length, 2); - }, - 'messages should contain proper data': function (result) { - assert.equal(result.results[0].to, 'recipient@domain.com'); - assert.equal(result.results[0].subject, 'Log event #1'); - assert.equal(result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2); - assert.equal(result.results[1].to, 'recipient@domain.com'); - assert.equal(result.results[1].subject, 'Log event #3'); - assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text)); - } - }, - 'error when sending email': { - topic: function() { - var setup = setupLogging('error when 
sending email', { - recipients: 'recipient@domain.com', - sendInterval: 0, - transport: 'SMTP', - SMTP: { port: 25, auth: { user: 'user@domain.com' } } - }); - - setup.mailer.createTransport = function() { - return { - sendMail: function(msg, cb) { - cb({ message: "oh noes" }); - }, - close: function() { } - }; - }; - - setup.logger.info("This will break"); - return setup.console; - }, - 'should be logged to console': function(cons) { - assert.equal(cons.errors.length, 1); - assert.equal(cons.errors[0].msg, "log4js.smtpAppender - Error happened"); - assert.equal(cons.errors[0].value.message, 'oh noes'); - } - } - -}).export(module); - From 270ba0fcc617d4ec3cf96f8d0322f08134ef45dd Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 08:44:36 +1000 Subject: [PATCH 16/53] added initial configuration --- lib/log4js.js | 24 +++++++++++------------- test/log4js-test.js | 30 ++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 13 deletions(-) diff --git a/lib/log4js.js b/lib/log4js.js index c781a307..f92a999b 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -20,18 +20,21 @@ *

Example:

*
  *  var logging = require('log4js');
- *  //add an appender that logs all messages to stdout.
- *  logging.addAppender(logging.consoleAppender());
- *  //add an appender that logs "some-category" to a file
- *  logging.addAppender(logging.fileAppender("file.log"), "some-category");
+ *  logging.configure({
+ *    appenders: {
+ *      "errorFile": { type: "file", filename: "error.log" }
+ *    },
+ *    categories: {
+ *      "default": { level: "ERROR", appenders: [ "errorFile" ] }
+ *    }
+ *  });
  *  //get a logger
  *  var log = logging.getLogger("some-category");
- *  log.setLevel(logging.levels.TRACE); //set the Level
  *
  *  ...
  *
  *  //call the log
- *  log.trace("trace me" );
+ *  log.error("oh noes");
  * 
* * NOTE: the authors below are the original browser-based log4js authors @@ -286,11 +289,6 @@ function restoreConsole() { */ module.exports = { getLogger: getLogger, -/* - addAppender: addAppender, - loadAppender: loadAppender, - clearAppenders: clearAppenders, -*/ configure: configure, /* replaceConsole: replaceConsole, @@ -306,6 +304,6 @@ module.exports = { }; //set ourselves up -//debug("Starting configuration"); -//configure(); +debug("Starting configuration"); +configure(defaultConfig); diff --git a/test/log4js-test.js b/test/log4js-test.js index 40742c90..6c721e90 100644 --- a/test/log4js-test.js +++ b/test/log4js-test.js @@ -288,4 +288,34 @@ describe('../lib/log4js', function() { it('should reload configuration if specified'); }); + + describe('with no configuration', function() { + var events = [] + , log4js_sandboxed = sandbox.require( + '../lib/log4js', + { + requires: { + './appenders/console': { + configure: function() { + return function(event) { events.push(event); }; + } + } + } + } + ); + + log4js_sandboxed.getLogger("blah").debug("goes to console"); + log4js_sandboxed.getLogger("yawn").trace("does not go to console"); + log4js_sandboxed.getLogger().error("also goes to console"); + + it('should log events of debug level and higher to console', function() { + events.should.have.length(2); + events[0].data[0].should.eql("goes to console"); + events[0].category.should.eql("blah"); + events[0].level.toString().should.eql("DEBUG"); + events[1].data[0].should.eql("also goes to console"); + events[1].category.should.eql("default"); + events[1].level.toString().should.eql("ERROR"); + }); + }); }); From 834e084e5a393690b6e1b74e3d859c9a12d64e53 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 08:47:29 +1000 Subject: [PATCH 17/53] setLevel removed, so no need for this test --- test/setLevel-asymmetry-test.js | 100 -------------------------------- 1 file changed, 100 deletions(-) delete mode 100644 test/setLevel-asymmetry-test.js diff --git a/test/setLevel-asymmetry-test.js b/test/setLevel-asymmetry-test.js deleted file mode 100644 index 95ba84b4..00000000 --- a/test/setLevel-asymmetry-test.js +++ /dev/null @@ -1,100 +0,0 @@ -"use strict"; -/* jshint loopfunc: true */ -// This test shows an asymmetry between setLevel and isLevelEnabled -// (in log4js-node@0.4.3 and earlier): -// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently -// does not (sets the level to TRACE). -// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo). -// - -// Basic set up -var vows = require('vows'); -var assert = require('assert'); -var log4js = require('../lib/log4js'); -var logger = log4js.getLogger('test-setLevel-asymmetry'); - -// uncomment one or other of the following to see progress (or not) while running the tests -// var showProgress = console.log; -var showProgress = function() {}; - - -// Define the array of levels as string to iterate over. -var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal']; - -var log4jsLevels =[]; -// populate an array with the log4js.levels that match the strLevels. -// Would be nice if we could iterate over log4js.levels instead, -// but log4js.levels.toLevel prevents that for now. -strLevels.forEach(function(l) { - log4jsLevels.push(log4js.levels.toLevel(l)); -}); - - -// We are going to iterate over this object's properties to define an exhaustive list of vows. 
-var levelTypes = { - 'string': strLevels, - 'log4js.levels.level': log4jsLevels, -}; - -// Set up the basic vows batch for this test -var batch = { - setLevel: { - } -}; - -showProgress('Populating batch object...'); - -// Populating the batch object programmatically, -// as I don't have the patience to manually populate it with -// the (strLevels.length x levelTypes.length) ^ 2 = 144 possible test combinations -for (var type in levelTypes) { - var context = 'is called with a '+type; - var levelsToTest = levelTypes[type]; - showProgress('Setting up the vows context for '+context); - - batch.setLevel[context]= {}; - levelsToTest.forEach( function(level) { - var subContext = 'of '+level; - var log4jsLevel=log4js.levels.toLevel(level.toString()); - - showProgress('Setting up the vows sub-context for '+subContext); - batch.setLevel[context][subContext] = {topic: level}; - for (var comparisonType in levelTypes) { - levelTypes[comparisonType].forEach(function(comparisonLevel) { - var t = type; - var ct = comparisonType; - var expectedResult = log4jsLevel.isLessThanOrEqualTo(comparisonLevel); - var vow = 'isLevelEnabled(' + comparisonLevel + - ') called with a ' + comparisonType + - ' should return ' + expectedResult; - showProgress('Setting up the vows vow for '+vow); - - batch.setLevel[context][subContext][vow] = function(levelToSet) { - logger.setLevel(levelToSet); - showProgress( - '*** Checking setLevel( ' + level + - ' ) of type ' + t + - ', and isLevelEnabled( ' + comparisonLevel + - ' ) of type ' + ct + '. Expecting: ' + expectedResult - ); - assert.equal( - logger.isLevelEnabled(comparisonLevel), - expectedResult, - 'Failed: calling setLevel( ' + level + - ' ) with type ' + type + - ', isLevelEnabled( ' + comparisonLevel + - ' ) of type ' + comparisonType + - ' did not return ' + expectedResult - ); - }; - }); - } - }); - -} - -showProgress('Running tests...'); - -vows.describe('log4js setLevel asymmetry fix').addBatch(batch).export(module); - - From b70e2e62201006e11dc7e3f461e622741d0cbb46 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 08:48:46 +1000 Subject: [PATCH 18/53] setGlobalLogLevel removed, so no need for this test --- test/global-log-level-test.js | 86 ----------------------------------- 1 file changed, 86 deletions(-) delete mode 100644 test/global-log-level-test.js diff --git a/test/global-log-level-test.js b/test/global-log-level-test.js deleted file mode 100644 index df9b3598..00000000 --- a/test/global-log-level-test.js +++ /dev/null @@ -1,86 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert'); - -vows.describe('log4js global loglevel').addBatch({ - 'global loglevel' : { - topic: function() { - var log4js = require('../lib/log4js'); - return log4js; - }, - - 'set global loglevel on creation': function(log4js) { - var log1 = log4js.getLogger('log1'); - var level = 'OFF'; - if (log1.level.toString() == level) { - level = 'TRACE'; - } - assert.notEqual(log1.level.toString(), level); - - log4js.setGlobalLogLevel(level); - assert.equal(log1.level.toString(), level); - - var log2 = log4js.getLogger('log2'); - assert.equal(log2.level.toString(), level); - }, - - 'global change loglevel': function(log4js) { - var log1 = log4js.getLogger('log1'); - var log2 = log4js.getLogger('log2'); - var level = 'OFF'; - if (log1.level.toString() == level) { - level = 'TRACE'; - } - assert.notEqual(log1.level.toString(), level); - - log4js.setGlobalLogLevel(level); - assert.equal(log1.level.toString(), level); - 
assert.equal(log2.level.toString(), level); - }, - - 'override loglevel': function(log4js) { - var log1 = log4js.getLogger('log1'); - var log2 = log4js.getLogger('log2'); - var level = 'OFF'; - if (log1.level.toString() == level) { - level = 'TRACE'; - } - assert.notEqual(log1.level.toString(), level); - - var oldLevel = log1.level.toString(); - assert.equal(log2.level.toString(), oldLevel); - - log2.setLevel(level); - assert.equal(log1.level.toString(), oldLevel); - assert.equal(log2.level.toString(), level); - assert.notEqual(oldLevel, level); - - log2.removeLevel(); - assert.equal(log1.level.toString(), oldLevel); - assert.equal(log2.level.toString(), oldLevel); - }, - - 'preload loglevel': function(log4js) { - var log1 = log4js.getLogger('log1'); - var level = 'OFF'; - if (log1.level.toString() == level) { - level = 'TRACE'; - } - assert.notEqual(log1.level.toString(), level); - - var oldLevel = log1.level.toString(); - log4js.getLogger('log2').setLevel(level); - - assert.equal(log1.level.toString(), oldLevel); - - // get again same logger but as different variable - var log2 = log4js.getLogger('log2'); - assert.equal(log2.level.toString(), level); - assert.notEqual(oldLevel, level); - - log2.removeLevel(); - assert.equal(log1.level.toString(), oldLevel); - assert.equal(log2.level.toString(), oldLevel); - } - } -}).export(module); From 7f38837f9b551d6c17d590d7e38d377ff73ed49d Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 11:46:55 +1000 Subject: [PATCH 19/53] removed duplicated tests --- test/logging-test.js | 372 +------------------------------------------ 1 file changed, 1 insertion(+), 371 deletions(-) diff --git a/test/logging-test.js b/test/logging-test.js index 8615ac0d..382ec0f3 100644 --- a/test/logging-test.js +++ b/test/logging-test.js @@ -32,365 +32,7 @@ function setupConsoleTest() { } vows.describe('log4js').addBatch({ - 'getLogger': { - topic: function() { - var log4js = require('../lib/log4js'); - log4js.clearAppenders(); - var logger = log4js.getLogger('tests'); - logger.setLevel("DEBUG"); - return logger; - }, - - 'should take a category and return a logger': function(logger) { - assert.equal(logger.category, 'tests'); - assert.equal(logger.level.toString(), "DEBUG"); - assert.isFunction(logger.debug); - assert.isFunction(logger.info); - assert.isFunction(logger.warn); - assert.isFunction(logger.error); - assert.isFunction(logger.fatal); - }, - - 'log events' : { - topic: function(logger) { - var events = []; - var log4js = require('../lib/log4js'); - log4js.addAppender(function (logEvent) { events.push(logEvent); }, "tests"); - logger.debug("Debug event"); - logger.trace("Trace event 1"); - logger.trace("Trace event 2"); - logger.warn("Warning event"); - logger.error("Aargh!", new Error("Pants are on fire!")); - logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" }); - return events; - }, - - 'should emit log events': function(events) { - assert.equal(events[0].level.toString(), 'DEBUG'); - assert.equal(events[0].data[0], 'Debug event'); - assert.instanceOf(events[0].startTime, Date); - }, - - 'should not emit events of a lower level': function(events) { - assert.equal(events.length, 4); - assert.equal(events[1].level.toString(), 'WARN'); - }, - - 'should include the error if passed in': function (events) { - assert.instanceOf(events[2].data[1], Error); - assert.equal(events[2].data[1].message, 'Pants are on fire!'); - } - - }, - - }, -/* - 'invalid configuration': { - 'should throw an exception': function() { - 
assert.throws(function() { - require('log4js').configure({ "type": "invalid" }); - }); - } - }, - - 'configuration when passed as object': { - topic: function() { - var appenderConfig, - log4js = sandbox.require( - '../lib/log4js', - { - requires: { - './appenders/file': - { - name: "file", - appender: function() {}, - configure: function(configuration) { - appenderConfig = configuration; - return function() {}; - } - } - } - } - ), - config = { appenders: - [ { "type" : "file", - "filename" : "cheesy-wotsits.log", - "maxLogSize" : 1024, - "backups" : 3 - } - ] - }; - log4js.configure(config); - return appenderConfig; - }, - 'should be passed to appender config': function(configuration) { - assert.equal(configuration.filename, 'cheesy-wotsits.log'); - } - }, - - 'configuration that causes an error': { - topic: function() { - var log4js = sandbox.require( - '../lib/log4js', - { - requires: { - './appenders/file': - { - name: "file", - appender: function() {}, - configure: function(configuration) { - throw new Error("oh noes"); - } - } - } - } - ), - config = { appenders: - [ { "type" : "file", - "filename" : "cheesy-wotsits.log", - "maxLogSize" : 1024, - "backups" : 3 - } - ] - }; - try { - log4js.configure(config); - } catch (e) { - return e; - } - }, - 'should wrap error in a meaningful message': function(e) { - assert.ok(e.message.indexOf('log4js configuration problem for') > -1); - } - }, - - 'configuration when passed as filename': { - topic: function() { - var appenderConfig, - configFilename, - log4js = sandbox.require( - '../lib/log4js', - { requires: - { 'fs': - { statSync: - function() { - return { mtime: Date.now() }; - }, - readFileSync: - function(filename) { - configFilename = filename; - return JSON.stringify({ - appenders: [ - { type: "file" - , filename: "whatever.log" - } - ] - }); - }, - readdirSync: - function() { - return ['file']; - } - }, - './appenders/file': - { name: "file", - appender: function() {}, - configure: function(configuration) { - appenderConfig = configuration; - return function() {}; - } - } - } - } - ); - log4js.configure("/path/to/cheese.json"); - return [ configFilename, appenderConfig ]; - }, - 'should read the config from a file': function(args) { - assert.equal(args[0], '/path/to/cheese.json'); - }, - 'should pass config to appender': function(args) { - assert.equal(args[1].filename, "whatever.log"); - } - }, - - 'with no appenders defined' : { - topic: function() { - var logger, - that = this, - fakeConsoleAppender = { - name: "console", - appender: function() { - return function(evt) { - that.callback(null, evt); - }; - }, - configure: function() { - return fakeConsoleAppender.appender(); - } - }, - log4js = sandbox.require( - '../lib/log4js', - { - requires: { - './appenders/console': fakeConsoleAppender - } - } - ); - logger = log4js.getLogger("some-logger"); - logger.debug("This is a test"); - }, - 'should default to the console appender': function(evt) { - assert.equal(evt.data[0], "This is a test"); - } - }, - - 'addAppender' : { - topic: function() { - var log4js = require('../lib/log4js'); - log4js.clearAppenders(); - return log4js; - }, - 'without a category': { - 'should register the function as a listener for all loggers': function (log4js) { - var appenderEvent, - appender = function(evt) { appenderEvent = evt; }, - logger = log4js.getLogger("tests"); - - log4js.addAppender(appender); - logger.debug("This is a test"); - assert.equal(appenderEvent.data[0], "This is a test"); - assert.equal(appenderEvent.categoryName, "tests"); - 
assert.equal(appenderEvent.level.toString(), "DEBUG"); - }, - 'if an appender for a category is defined': { - 'should register for that category': function (log4js) { - var otherEvent, - appenderEvent, - cheeseLogger; - - log4js.addAppender(function (evt) { appenderEvent = evt; }); - log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese'); - - cheeseLogger = log4js.getLogger('cheese'); - cheeseLogger.debug('This is a test'); - assert.deepEqual(appenderEvent, otherEvent); - assert.equal(otherEvent.data[0], 'This is a test'); - assert.equal(otherEvent.categoryName, 'cheese'); - - otherEvent = undefined; - appenderEvent = undefined; - log4js.getLogger('pants').debug("this should not be propagated to otherEvent"); - assert.isUndefined(otherEvent); - assert.equal(appenderEvent.data[0], "this should not be propagated to otherEvent"); - } - } - }, - - 'with a category': { - 'should only register the function as a listener for that category': function(log4js) { - var appenderEvent, - appender = function(evt) { appenderEvent = evt; }, - logger = log4js.getLogger("tests"); - - log4js.addAppender(appender, 'tests'); - logger.debug('this is a category test'); - assert.equal(appenderEvent.data[0], 'this is a category test'); - - appenderEvent = undefined; - log4js.getLogger('some other category').debug('Cheese'); - assert.isUndefined(appenderEvent); - } - }, - - 'with multiple categories': { - 'should register the function as a listener for all the categories': function(log4js) { - var appenderEvent, - appender = function(evt) { appenderEvent = evt; }, - logger = log4js.getLogger('tests'); - - log4js.addAppender(appender, 'tests', 'biscuits'); - - logger.debug('this is a test'); - assert.equal(appenderEvent.data[0], 'this is a test'); - appenderEvent = undefined; - - var otherLogger = log4js.getLogger('biscuits'); - otherLogger.debug("mmm... garibaldis"); - assert.equal(appenderEvent.data[0], "mmm... 
garibaldis"); - - appenderEvent = undefined; - - log4js.getLogger("something else").debug("pants"); - assert.isUndefined(appenderEvent); - }, - 'should register the function when the list of categories is an array': function(log4js) { - var appenderEvent, - appender = function(evt) { appenderEvent = evt; }; - - log4js.addAppender(appender, ['tests', 'pants']); - - log4js.getLogger('tests').debug('this is a test'); - assert.equal(appenderEvent.data[0], 'this is a test'); - - appenderEvent = undefined; - - log4js.getLogger('pants').debug("big pants"); - assert.equal(appenderEvent.data[0], "big pants"); - - appenderEvent = undefined; - - log4js.getLogger("something else").debug("pants"); - assert.isUndefined(appenderEvent); - } - } - }, - - 'default setup': { - topic: function() { - var appenderEvents = [], - fakeConsole = { - 'name': 'console', - 'appender': function () { - return function(evt) { - appenderEvents.push(evt); - }; - }, - 'configure': function (config) { - return fakeConsole.appender(); - } - }, - globalConsole = { - log: function() { } - }, - log4js = sandbox.require( - '../lib/log4js', - { - requires: { - './appenders/console': fakeConsole - }, - globals: { - console: globalConsole - } - } - ), - logger = log4js.getLogger('a-test'); - - logger.debug("this is a test"); - globalConsole.log("this should not be logged"); - - return appenderEvents; - }, - - 'should configure a console appender': function(appenderEvents) { - assert.equal(appenderEvents[0].data[0], 'this is a test'); - }, - - 'should not replace console.log with log4js version': function(appenderEvents) { - assert.equal(appenderEvents.length, 1); - } - }, - + 'console' : { topic: setupConsoleTest, @@ -498,17 +140,5 @@ vows.describe('log4js').addBatch({ 'should maintain appenders between requires': function (logEvent) { assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js"); } - }, - - 'getDefaultLogger': { - topic: function() { - return require('../lib/log4js').getDefaultLogger(); - }, - 'should return a logger': function(logger) { - assert.ok(logger.info); - assert.ok(logger.debug); - assert.ok(logger.error); - } } -*/ }).export(module); From 4e6e51e9fa3eddc5b49ec2bcefa9f0a57d03fbd0 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Aug 2013 11:47:06 +1000 Subject: [PATCH 20/53] test config file --- test/with-cheese.json | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 test/with-cheese.json diff --git a/test/with-cheese.json b/test/with-cheese.json new file mode 100644 index 00000000..0fb444c2 --- /dev/null +++ b/test/with-cheese.json @@ -0,0 +1,9 @@ +{ + "appenders": { + "thing": { "type": "cheese" } + }, + "categories": { + "default": { "level": "DEBUG", "appenders": [ "thing" ] }, + "noisy": { "level": "ERROR", "appenders": [ "thing" ] } + } +} From 6c09a6fb710ee45881d69d4b06686dac1972e08c Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 23 Aug 2013 08:42:26 +1000 Subject: [PATCH 21/53] fixed trailing comma --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 18354081..e103052b 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,7 @@ "dependencies": { "async": "0.1.15", "semver": "~1.1.4", - "readable-stream": "~1.0.2", + "readable-stream": "~1.0.2" }, "devDependencies": { "sandboxed-module": "0.1.3", From 9afbbb580e89ba1bfe9285fabe0b06c5c4231f62 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 23 Aug 2013 08:42:51 +1000 Subject: [PATCH 22/53] proven that category filter no 
longer needed --- test/categoryFilter-test.js | 153 +++++++++++++++++++----------------- 1 file changed, 83 insertions(+), 70 deletions(-) diff --git a/test/categoryFilter-test.js b/test/categoryFilter-test.js index 1ad10a0c..99a477c4 100644 --- a/test/categoryFilter-test.js +++ b/test/categoryFilter-test.js @@ -1,83 +1,96 @@ 'use strict'; -var vows = require('vows') +var async = require('async') +, should = require('should') , fs = require('fs') +, path = require('path') , assert = require('assert'); -function remove(filename) { - try { - fs.unlinkSync(filename); - } catch (e) { - //doesn't really matter if it failed - } +function remove() { + var files = Array.prototype.slice.call(arguments); + return function(done) { + async.forEach( + files.map(function(file) { return path.join(__dirname, file); }), + fs.unlink.bind(fs), + function() { done(); } + ); + }; } -vows.describe('log4js categoryFilter').addBatch({ - 'appender': { - topic: function() { - - var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger; - log4js.clearAppenders(); - var appender = require('../lib/appenders/categoryFilter') - .appender( - ['app'], - function(evt) { logEvents.push(evt); } - ); - log4js.addAppender(appender, ["app","web"]); +describe('log4js', function() { + + before( + remove( + 'test-category-filter-web.log', + 'test-category-filter-all.log' + ) + ); + + after( + remove( + 'test-category-filter-web.log', + 'test-category-filter-all.log' + ) + ); + + describe('category filtering', function() { + before(function() { + var log4js = require('../lib/log4js') + , webLogger = log4js.getLogger("web") + , appLogger = log4js.getLogger("app"); - webLogger = log4js.getLogger("web"); - appLogger = log4js.getLogger("app"); + log4js.configure({ + appenders: { + rest: { + type: "file", + layout: { type: "messagePassThrough" }, + filename: path.join(__dirname, "test-category-filter-all.log") + }, + web: { + type: "file", + layout: { type: "messagePassThrough"}, + filename: path.join(__dirname, "test-category-filter-web.log") + } + }, + categories: { + "default": { level: "debug", appenders: [ "rest" ] }, + web: { level: "debug", appenders: [ "web" ] } + } + }); webLogger.debug('This should get logged'); appLogger.debug('This should not'); webLogger.debug('Hello again'); log4js.getLogger('db').debug('This shouldn\'t be included by the appender anyway'); - - return logEvents; - }, - 'should only pass matching category' : function(logEvents) { - assert.equal(logEvents.length, 2); - assert.equal(logEvents[0].data[0], 'This should get logged'); - assert.equal(logEvents[1].data[0], 'Hello again'); - } - }, - - 'configure': { - topic: function() { - var log4js = require('../lib/log4js') - , logger, weblogger; - - remove(__dirname + '/categoryFilter-web.log'); - remove(__dirname + '/categoryFilter-noweb.log'); - - log4js.configure('test/with-categoryFilter.json'); - logger = log4js.getLogger("app"); - weblogger = log4js.getLogger("web"); - - logger.info('Loading app'); - logger.debug('Initialising indexes'); - weblogger.info('00:00:00 GET / 200'); - weblogger.warn('00:00:00 GET / 500'); - //wait for the file system to catch up - setTimeout(this.callback, 100); - }, - 'tmp-tests.log': { - topic: function() { - fs.readFile(__dirname + '/categoryFilter-noweb.log', 'utf8', this.callback); - }, - 'should contain all log messages': function(contents) { - var messages = contents.trim().split('\n'); - assert.deepEqual(messages, ['Loading app','Initialising indexes']); - } - }, - 'tmp-tests-web.log': { - topic: 
function() { - fs.readFile(__dirname + '/categoryFilter-web.log','utf8',this.callback); - }, - 'should contain only error and warning log messages': function(contents) { - var messages = contents.trim().split('\n'); - assert.deepEqual(messages, ['00:00:00 GET / 200','00:00:00 GET / 500']); - } - } - } -}).export(module); + }); + + it('should only pass matching category', function(done) { + setTimeout(function() { + fs.readFile( + path.join(__dirname, 'test-category-filter-web.log'), + 'utf8', + function(err, contents) { + var lines = contents.trim().split('\n'); + lines.should.eql(["This should get logged", "Hello again"]); + done(err); + } + ); + }, 50); + }); + + it('should send everything else to default appender', function(done) { + setTimeout(function() { + fs.readFile( + path.join(__dirname, 'test-category-filter-all.log'), + 'utf8', + function(err, contents) { + var lines = contents.trim().split('\n'); + lines.should.eql(["This should not", "This shouldn't be included by the appender anyway"]); + done(err); + } + ); + }, 50); + }); + + }); +}); From d8cf8cb2dcb60c03ff1f92a6c6e7102e67c7272e Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 23 Aug 2013 08:43:19 +1000 Subject: [PATCH 23/53] removed category filter --- lib/appenders/categoryFilter.js | 20 -------------------- 1 file changed, 20 deletions(-) delete mode 100644 lib/appenders/categoryFilter.js diff --git a/lib/appenders/categoryFilter.js b/lib/appenders/categoryFilter.js deleted file mode 100644 index f854f65e..00000000 --- a/lib/appenders/categoryFilter.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -var log4js = require('../log4js'); - -function categoryFilter (excludes, appender) { - if (typeof(excludes) === 'string') excludes = [excludes]; - return function(logEvent) { - if (excludes.indexOf(logEvent.categoryName) === -1) { - appender(logEvent); - } - }; -} - -function configure(config) { - log4js.loadAppender(config.appender.type); - var appender = log4js.appenderMakers[config.appender.type](config.appender); - return categoryFilter(config.exclude, appender); -} - -exports.appender = categoryFilter; -exports.configure = configure; From eabcaf8aef8a95225edb073f9fa5e24a60110cf0 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sat, 24 Aug 2013 20:46:10 +1000 Subject: [PATCH 24/53] moved cluster support into core, removed clustered appender, multiprocess appender --- lib/appenders/clustered.js | 118 ------------- lib/appenders/multiprocess.js | 129 -------------- lib/log4js.js | 169 +++++------------- test/clusteredAppender-test.js | 259 +++++++++++++++------------- test/multiprocess-test.js | 303 --------------------------------- 5 files changed, 190 insertions(+), 788 deletions(-) delete mode 100755 lib/appenders/clustered.js delete mode 100644 lib/appenders/multiprocess.js delete mode 100644 test/multiprocess-test.js diff --git a/lib/appenders/clustered.js b/lib/appenders/clustered.js deleted file mode 100755 index f56e89a3..00000000 --- a/lib/appenders/clustered.js +++ /dev/null @@ -1,118 +0,0 @@ -"use strict"; - -var cluster = require('cluster'); -var log4js = require('../log4js'); - -/** - * Takes a loggingEvent object, returns string representation of it. - */ -function serializeLoggingEvent(loggingEvent) { - return JSON.stringify(loggingEvent); -} - -/** - * Takes a string, returns an object with - * the correct log properties. 
- * - * This method has been "borrowed" from the `multiprocess` appender - * by `nomiddlename` (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js) - * - * Apparently, node.js serializes everything to strings when using `process.send()`, - * so we need smart deserialization that will recreate log date and level for further processing by log4js internals. - */ -function deserializeLoggingEvent(loggingEventString) { - - var loggingEvent; - - try { - - loggingEvent = JSON.parse(loggingEventString); - loggingEvent.startTime = new Date(loggingEvent.startTime); - loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr); - - } catch (e) { - - // JSON.parse failed, just log the contents probably a naughty. - loggingEvent = { - startTime: new Date(), - categoryName: 'log4js', - level: log4js.levels.ERROR, - data: [ 'Unable to parse log:', loggingEventString ] - }; - } - return loggingEvent; -} - -/** - * Creates an appender. - * - * If the current process is a master (`cluster.isMaster`), then this will be a "master appender". - * Otherwise this will be a worker appender, that just sends loggingEvents to the master process. - * - * If you are using this method directly, make sure to provide it with `config.actualAppenders` array - * of actual appender instances. - * - * Or better use `configure(config, options)` - */ -function createAppender(config) { - - if (cluster.isMaster) { - - var masterAppender = function(loggingEvent) { - - if (config.actualAppenders) { - var size = config.actualAppenders.length; - for(var i = 0; i < size; i++) { - config.actualAppenders[i](loggingEvent); - } - } - } - - // Listen on new workers - cluster.on('fork', function(worker) { - - worker.on('message', function(message) { - if (message.type && message.type === '::log-message') { - // console.log("master : " + cluster.isMaster + " received message: " + JSON.stringify(message.event)); - - var loggingEvent = deserializeLoggingEvent(message.event); - masterAppender(loggingEvent); - } - }); - - }); - - return masterAppender; - - } else { - - return function(loggingEvent) { - // If inside the worker process, then send the logger event to master. - if (cluster.isWorker) { - // console.log("worker " + cluster.worker.id + " is sending message"); - process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent)}); - } - } - } -} - -function configure(config, options) { - - if (config.appenders && cluster.isMaster) { - - var size = config.appenders.length; - config.actualAppenders = new Array(size); - - for(var i = 0; i < size; i++) { - - log4js.loadAppender(config.appenders[i].type); - config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](config.appenders[i], options); - - } - } - - return createAppender(config); -} - -exports.appender = createAppender; -exports.configure = configure; \ No newline at end of file diff --git a/lib/appenders/multiprocess.js b/lib/appenders/multiprocess.js deleted file mode 100644 index 2a1d56f7..00000000 --- a/lib/appenders/multiprocess.js +++ /dev/null @@ -1,129 +0,0 @@ -"use strict"; -var log4js = require('../log4js') -, net = require('net') -, END_MSG = '__LOG4JS__'; - -/** - * Creates a server, listening on config.loggerPort, config.loggerHost. - * Output goes to config.actualAppender (config.appender is used to - * set up that appender). - */ -function logServer(config) { - - /** - * Takes a utf-8 string, returns an object with - * the correct log properties. 
- */ - function deserializeLoggingEvent(clientSocket, msg) { - var loggingEvent; - try { - loggingEvent = JSON.parse(msg); - loggingEvent.startTime = new Date(loggingEvent.startTime); - loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr); - } catch (e) { - // JSON.parse failed, just log the contents probably a naughty. - loggingEvent = { - startTime: new Date(), - categoryName: 'log4js', - level: log4js.levels.ERROR, - data: [ 'Unable to parse log:', msg ] - }; - } - - loggingEvent.remoteAddress = clientSocket.remoteAddress; - loggingEvent.remotePort = clientSocket.remotePort; - - return loggingEvent; - } - - var actualAppender = config.actualAppender, - server = net.createServer(function serverCreated(clientSocket) { - clientSocket.setEncoding('utf8'); - var logMessage = ''; - - function logTheMessage(msg) { - if (logMessage.length > 0) { - actualAppender(deserializeLoggingEvent(clientSocket, msg)); - } - } - - function chunkReceived(chunk) { - var event; - logMessage += chunk || ''; - if (logMessage.indexOf(END_MSG) > -1) { - event = logMessage.substring(0, logMessage.indexOf(END_MSG)); - logTheMessage(event); - logMessage = logMessage.substring(event.length + END_MSG.length) || ''; - //check for more, maybe it was a big chunk - chunkReceived(); - } - } - - clientSocket.on('data', chunkReceived); - clientSocket.on('end', chunkReceived); - }); - - server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost'); - - return actualAppender; -} - -function workerAppender(config) { - var canWrite = false, - buffer = [], - socket; - - createSocket(); - - function createSocket() { - socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost'); - socket.on('connect', function() { - emptyBuffer(); - canWrite = true; - }); - socket.on('timeout', socket.end.bind(socket)); - //don't bother listening for 'error', 'close' gets called after that anyway - socket.on('close', createSocket); - } - - function emptyBuffer() { - var evt; - while ((evt = buffer.shift())) { - write(evt); - } - } - - function write(loggingEvent) { - socket.write(JSON.stringify(loggingEvent), 'utf8'); - socket.write(END_MSG, 'utf8'); - } - - return function log(loggingEvent) { - if (canWrite) { - write(loggingEvent); - } else { - buffer.push(loggingEvent); - } - }; -} - -function createAppender(config) { - if (config.mode === 'master') { - return logServer(config); - } else { - return workerAppender(config); - } -} - -function configure(config, options) { - var actualAppender; - if (config.appender && config.mode === 'master') { - log4js.loadAppender(config.appender.type); - actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options); - config.actualAppender = actualAppender; - } - return createAppender(config); -} - -exports.appender = createAppender; -exports.configure = configure; diff --git a/lib/log4js.js b/lib/log4js.js index f92a999b..b7525c7c 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -48,6 +48,7 @@ */ var debug = require('./debug')('core') , fs = require('fs') +, cluster = require('cluster') , util = require('util') , layouts = require('./layouts') , levels = require('./levels') @@ -64,16 +65,57 @@ var debug = require('./debug')('core') } }; +function serialise(event) { + return JSON.stringify(event); +} + +function deserialise(serialised) { + var event; + try { + event = JSON.parse(serialised); + event.startTime = new Date(event.startTime); + event.level = levels.toLevel(event.level.levelStr); + } catch(e) { + event = { + 
startTime: new Date(), + category: 'log4js', + level: levels.ERROR, + data: [ 'Unable to parse log:', serialised ] + }; + } + + return event; +} + +//in a multi-process node environment, worker loggers will use +//process.send +cluster.on('fork', function(worker) { + debug('listening to worker: ' + worker); + worker.on('message', function(message) { + if (message.type && message.type === '::log4js-message') { + debug("received message: " + message.event); + dispatch(deserialise(message.event)); + } + }); +}); + /** * Get a logger instance. - * @param {String} categoryName name of category to log to. + * @param {String} category to log to. * @return {Logger} instance of logger for the category * @static */ -function getLogger (categoryName) { - debug("getLogger(" + categoryName + ")"); +function getLogger (category) { + debug("getLogger(" + category + ")"); - return new Logger(dispatch, categoryName || 'default'); + return new Logger( + cluster.isMaster ? dispatch : workerDispatch, + category || 'default' + ); +} + +function workerDispatch(event) { + process.send({ type: "::log4js-message", event: serialise(event) }); } /** @@ -92,67 +134,6 @@ function dispatch(event) { } } -/* -var configState = {}; - -function loadConfigurationFile(filename) { - if (filename) { - return JSON.parse(fs.readFileSync(filename, "utf8")); - } - return undefined; -} - -function configureOnceOff(config, options) { - if (config) { - try { - configureAppenders(config.appenders, options); - configureLevels(config.levels); - - if (config.replaceConsole) { - replaceConsole(); - } else { - restoreConsole(); - } - } catch (e) { - throw new Error( - "Problem reading log4js config " + util.inspect(config) + - ". Error was \"" + e.message + "\" (" + e.stack + ")" - ); - } - } -} - -function reloadConfiguration() { - var mtime = getMTime(configState.filename); - if (!mtime) return; - - if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) { - configureOnceOff(loadConfigurationFile(configState.filename)); - } - configState.lastMTime = mtime; -} - -function getMTime(filename) { - var mtime; - try { - mtime = fs.statSync(configState.filename).mtime; - } catch (e) { - getLogger('log4js').warn('Failed to load configuration file ' + filename); - } - return mtime; -} - -function initReloadConfiguration(filename, options) { - if (configState.timerId) { - clearInterval(configState.timerId); - delete configState.timerId; - } - configState.filename = filename; - configState.lastMTime = getMTime(filename); - configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000); -} -*/ - function load(file) { return JSON.parse(fs.readFileSync(file, "utf-8")); } @@ -178,25 +159,6 @@ function configure(configurationFileOrObject) { validateCategories(config.categories); categories = config.categories; -/* - var config = configurationFileOrObject; - config = config || process.env.LOG4JS_CONFIG; - options = options || {}; - - if (config === undefined || config === null || typeof(config) === 'string') { - if (options.reloadSecs) { - initReloadConfiguration(config, options); - } - config = loadConfigurationFile(config) || defaultConfig; - } else { - if (options.reloadSecs) { - getLogger('log4js').warn( - 'Ignoring configuration reload parameter for "object" configuration.' 
- ); - } - } - configureOnceOff(config, options); -*/ } function validateCategories(cats) { @@ -259,48 +221,9 @@ function loadAppender(appender) { appenderMakers[appender] = appenderModule.configure.bind(appenderModule); } -/* -var originalConsoleFunctions = { - log: console.log, - debug: console.debug, - info: console.info, - warn: console.warn, - error: console.error -}; - -function replaceConsole(logger) { - function replaceWith(fn) { - return function() { - fn.apply(logger, arguments); - }; - } - logger = logger || getLogger("console"); - ['log','debug','info','warn','error'].forEach(function (item) { - console[item] = replaceWith(item === 'log' ? logger.info : logger[item]); - }); -} - -function restoreConsole() { - ['log', 'debug', 'info', 'warn', 'error'].forEach(function (item) { - console[item] = originalConsoleFunctions[item]; - }); -} - -*/ module.exports = { getLogger: getLogger, configure: configure, -/* - replaceConsole: replaceConsole, - restoreConsole: restoreConsole, - - levels: levels, - - layouts: layouts, - appenders: {}, - appenderMakers: appenderMakers, - connectLogger: require('./connect-logger').connectLogger -*/ }; //set ourselves up diff --git a/test/clusteredAppender-test.js b/test/clusteredAppender-test.js index a0dd5fb7..c60fdea0 100755 --- a/test/clusteredAppender-test.js +++ b/test/clusteredAppender-test.js @@ -1,116 +1,145 @@ "use strict"; -var assert = require('assert'); -var vows = require('vows'); -var layouts = require('../lib/layouts'); -var sandbox = require('sandboxed-module'); -var LoggingEvent = require('../lib/logger').LoggingEvent; -var cluster = require('cluster'); - -vows.describe('log4js cluster appender').addBatch({ - 'when in master mode': { - topic: function() { - - var registeredClusterEvents = []; - var loggingEvents = []; - - // Fake cluster module, so no cluster listeners be really added - var fakeCluster = { - - on: function(event, callback) { - registeredClusterEvents.push(event); - }, - - isMaster: true, - isWorker: false, - - }; - - var fakeActualAppender = function(loggingEvent) { - loggingEvents.push(loggingEvent); - } - - // Load appender and fake modules in it - var appenderModule = sandbox.require('../lib/appenders/clustered', { - requires: { - 'cluster': fakeCluster, - } - }); - - var masterAppender = appenderModule.appender({ - actualAppenders: [ fakeActualAppender ] - }); - - // Actual test - log message using masterAppender - masterAppender(new LoggingEvent('wovs', 'Info', ['masterAppender test'])); - - var returnValue = { - registeredClusterEvents: registeredClusterEvents, - loggingEvents: loggingEvents, - }; - - return returnValue; - }, - - "should register 'fork' event listener on 'cluster'": function(topic) { - assert.equal(topic.registeredClusterEvents[0], 'fork'); - }, - - "should log using actual appender": function(topic) { - assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test'); - }, - - }, - - 'when in worker mode': { - - topic: function() { - - var registeredProcessEvents = []; - - // Fake cluster module, to fake we're inside a worker process - var fakeCluster = { - - isMaster: false, - isWorker: true, - - }; - - var fakeProcess = { - - send: function(data) { - registeredProcessEvents.push(data); - }, - - }; - - // Load appender and fake modules in it - var appenderModule = sandbox.require('../lib/appenders/clustered', { - requires: { - 'cluster': fakeCluster, - }, - globals: { - 'process': fakeProcess, - } - }); - - var workerAppender = appenderModule.appender(); - - // Actual test - log message 
using masterAppender - workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test'])); - - var returnValue = { - registeredProcessEvents: registeredProcessEvents, - }; - - return returnValue; - - }, - - "worker appender should call process.send" : function(topic) { - assert.equal(topic.registeredProcessEvents[0].type, '::log-message'); - assert.equal(JSON.parse(topic.registeredProcessEvents[0].event).data[0], "workerAppender test"); - } - - } - -}).exportTo(module); +var should = require('should') +, sandbox = require('sandboxed-module'); + + +describe('log4js in a cluster', function() { + describe('when in master mode', function() { + + var log4js + , clusterOnFork = false + , workerCb + , events = [] + , worker = { + on: function(evt, cb) { + evt.should.eql('message'); + this.cb = cb; + } + }; + + before(function() { + log4js = sandbox.require( + '../lib/log4js', + { + requires: { + 'cluster': { + isMaster: true, + on: function(evt, cb) { + evt.should.eql('fork'); + clusterOnFork = true; + cb(worker); + } + }, + './appenders/console': { + configure: function() { + return function(event) { + events.push(event); + }; + } + } + } + } + ); + }); + + it('should listen for fork events', function() { + clusterOnFork.should.be.true; + }); + + it('should listen for messages from workers', function() { + //workerCb was created in a different context to the test + //(thanks to sandbox.require), so doesn't pick up the should prototype + (typeof worker.cb).should.eql('function'); + }); + + it('should log valid ::log4js-message events', function() { + worker.cb({ + type: '::log4js-message', + event: JSON.stringify({ + startTime: '2010-10-10 18:54:06', + category: 'cheese', + level: { levelStr: 'DEBUG' }, + data: [ "blah" ] + }) + }); + events.should.have.length(1); + events[0].data[0].should.eql("blah"); + events[0].category.should.eql('cheese'); + //startTime was created in a different context to the test + //(thanks to sandbox.require), so instanceof doesn't think + //it's a Date. 
+ events[0].startTime.constructor.name.should.eql('Date'); + events[0].level.toString().should.eql('DEBUG'); + }); + + it('should handle invalid ::log4js-message events', function() { + worker.cb({ + type: '::log4js-message', + event: "biscuits" + }); + worker.cb({ + type: '::log4js-message', + event: JSON.stringify({ + startTime: 'whatever' + }) + }); + + events.should.have.length(3); + events[1].data[0].should.eql('Unable to parse log:'); + events[1].data[1].should.eql('biscuits'); + events[1].category.should.eql('log4js'); + events[1].level.toString().should.eql('ERROR'); + + events[2].data[0].should.eql('Unable to parse log:'); + events[2].data[1].should.eql(JSON.stringify({ startTime: 'whatever'})); + + }); + + it('should ignore other events', function() { + worker.cb({ + type: "::blah-blah", + event: "blah" + }); + + events.should.have.length(3); + }); + + }); + + describe('when in worker mode', function() { + var log4js, events = []; + + before(function() { + log4js = sandbox.require( + '../lib/log4js', + { + requires: { + 'cluster': { + isMaster: false, + on: function() {} + } + }, + globals: { + 'process': { + 'send': function(event) { + events.push(event); + } + } + } + } + ); + log4js.getLogger('test').debug("just testing"); + }); + + it('should emit ::log4js-message events', function() { + events.should.have.length(1); + events[0].type.should.eql('::log4js-message'); + events[0].event.should.be.a('string'); + + var evt = JSON.parse(events[0].event); + evt.category.should.eql('test'); + evt.level.levelStr.should.eql('DEBUG'); + evt.data[0].should.eql('just testing'); + }); + }); +}); diff --git a/test/multiprocess-test.js b/test/multiprocess-test.js deleted file mode 100644 index 600cae5f..00000000 --- a/test/multiprocess-test.js +++ /dev/null @@ -1,303 +0,0 @@ -"use strict"; -var vows = require('vows') -, sandbox = require('sandboxed-module') -, assert = require('assert') -; - -function makeFakeNet() { - return { - logEvents: [], - data: [], - cbs: {}, - createConnectionCalled: 0, - fakeAppender: function(logEvent) { - this.logEvents.push(logEvent); - }, - createConnection: function(port, host) { - var fakeNet = this; - this.port = port; - this.host = host; - this.createConnectionCalled += 1; - return { - on: function(evt, cb) { - fakeNet.cbs[evt] = cb; - }, - write: function(data, encoding) { - fakeNet.data.push(data); - fakeNet.encoding = encoding; - }, - end: function() { - fakeNet.closeCalled = true; - } - }; - }, - createServer: function(cb) { - var fakeNet = this; - cb({ - remoteAddress: '1.2.3.4', - remotePort: '1234', - setEncoding: function(encoding) { - fakeNet.encoding = encoding; - }, - on: function(event, cb) { - fakeNet.cbs[event] = cb; - } - }); - - return { - listen: function(port, host) { - fakeNet.port = port; - fakeNet.host = host; - } - }; - } - }; -} - -vows.describe('Multiprocess Appender').addBatch({ - 'worker': { - topic: function() { - var fakeNet = makeFakeNet(), - appender = sandbox.require( - '../lib/appenders/multiprocess', - { - requires: { - 'net': fakeNet - } - } - ).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' }); - - //don't need a proper log event for the worker tests - appender('before connect'); - fakeNet.cbs.connect(); - appender('after connect'); - fakeNet.cbs.close(true); - appender('after error, before connect'); - fakeNet.cbs.connect(); - appender('after error, after connect'); - - return fakeNet; - }, - 'should open a socket to the loggerPort and loggerHost': function(net) { - assert.equal(net.port, 1234); - 
assert.equal(net.host, 'pants'); - }, - 'should buffer messages written before socket is connected': function(net) { - assert.equal(net.data[0], JSON.stringify('before connect')); - }, - 'should write log messages to socket as json strings with a terminator string': function(net) { - assert.equal(net.data[0], JSON.stringify('before connect')); - assert.equal(net.data[1], '__LOG4JS__'); - assert.equal(net.data[2], JSON.stringify('after connect')); - assert.equal(net.data[3], '__LOG4JS__'); - assert.equal(net.encoding, 'utf8'); - }, - 'should attempt to re-open the socket on error': function(net) { - assert.equal(net.data[4], JSON.stringify('after error, before connect')); - assert.equal(net.data[5], '__LOG4JS__'); - assert.equal(net.data[6], JSON.stringify('after error, after connect')); - assert.equal(net.data[7], '__LOG4JS__'); - assert.equal(net.createConnectionCalled, 2); - } - }, - 'worker with timeout': { - topic: function() { - var fakeNet = makeFakeNet(), - appender = sandbox.require( - '../lib/appenders/multiprocess', - { - requires: { - 'net': fakeNet - } - } - ).appender({ mode: 'worker' }); - - //don't need a proper log event for the worker tests - appender('before connect'); - fakeNet.cbs.connect(); - appender('after connect'); - fakeNet.cbs.timeout(); - appender('after timeout, before close'); - fakeNet.cbs.close(); - appender('after close, before connect'); - fakeNet.cbs.connect(); - appender('after close, after connect'); - - return fakeNet; - }, - 'should attempt to re-open the socket': function(net) { - //skipping the __LOG4JS__ separators - assert.equal(net.data[0], JSON.stringify('before connect')); - assert.equal(net.data[2], JSON.stringify('after connect')); - assert.equal(net.data[4], JSON.stringify('after timeout, before close')); - assert.equal(net.data[6], JSON.stringify('after close, before connect')); - assert.equal(net.data[8], JSON.stringify('after close, after connect')); - assert.equal(net.createConnectionCalled, 2); - } - }, - 'worker defaults': { - topic: function() { - var fakeNet = makeFakeNet(), - appender = sandbox.require( - '../lib/appenders/multiprocess', - { - requires: { - 'net': fakeNet - } - } - ).appender({ mode: 'worker' }); - - return fakeNet; - }, - 'should open a socket to localhost:5000': function(net) { - assert.equal(net.port, 5000); - assert.equal(net.host, 'localhost'); - } - }, - 'master': { - topic: function() { - var fakeNet = makeFakeNet(), - appender = sandbox.require( - '../lib/appenders/multiprocess', - { - requires: { - 'net': fakeNet - } - } - ).appender({ mode: 'master', - loggerHost: 'server', - loggerPort: 1234, - actualAppender: fakeNet.fakeAppender.bind(fakeNet) - }); - - appender('this should be sent to the actual appender directly'); - - return fakeNet; - }, - 'should listen for log messages on loggerPort and loggerHost': function(net) { - assert.equal(net.port, 1234); - assert.equal(net.host, 'server'); - }, - 'should return the underlying appender': function(net) { - assert.equal(net.logEvents[0], 'this should be sent to the actual appender directly'); - }, - 'when a client connects': { - topic: function(net) { - var logString = JSON.stringify( - { level: { level: 10000, levelStr: 'DEBUG' } - , data: ['some debug']} - ) + '__LOG4JS__'; - - net.cbs.data( - JSON.stringify( - { level: { level: 40000, levelStr: 'ERROR' } - , data: ['an error message'] } - ) + '__LOG4JS__' - ); - net.cbs.data(logString.substring(0, 10)); - net.cbs.data(logString.substring(10)); - net.cbs.data(logString + logString + logString); - 
net.cbs.end( - JSON.stringify( - { level: { level: 50000, levelStr: 'FATAL' } - , data: ["that's all folks"] } - ) + '__LOG4JS__' - ); - net.cbs.data('bad message__LOG4JS__'); - return net; - }, - 'should parse log messages into log events and send to appender': function(net) { - assert.equal(net.logEvents[1].level.toString(), 'ERROR'); - assert.equal(net.logEvents[1].data[0], 'an error message'); - assert.equal(net.logEvents[1].remoteAddress, '1.2.3.4'); - assert.equal(net.logEvents[1].remotePort, '1234'); - }, - 'should parse log messages split into multiple chunks': function(net) { - assert.equal(net.logEvents[2].level.toString(), 'DEBUG'); - assert.equal(net.logEvents[2].data[0], 'some debug'); - assert.equal(net.logEvents[2].remoteAddress, '1.2.3.4'); - assert.equal(net.logEvents[2].remotePort, '1234'); - }, - 'should parse multiple log messages in a single chunk': function(net) { - assert.equal(net.logEvents[3].data[0], 'some debug'); - assert.equal(net.logEvents[4].data[0], 'some debug'); - assert.equal(net.logEvents[5].data[0], 'some debug'); - }, - 'should handle log messages sent as part of end event': function(net) { - assert.equal(net.logEvents[6].data[0], "that's all folks"); - }, - 'should handle unparseable log messages': function(net) { - assert.equal(net.logEvents[7].level.toString(), 'ERROR'); - assert.equal(net.logEvents[7].categoryName, 'log4js'); - assert.equal(net.logEvents[7].data[0], 'Unable to parse log:'); - assert.equal(net.logEvents[7].data[1], 'bad message'); - } - } - }, - 'master defaults': { - topic: function() { - var fakeNet = makeFakeNet(), - appender = sandbox.require( - '../lib/appenders/multiprocess', - { - requires: { - 'net': fakeNet - } - } - ).appender({ mode: 'master' }); - - return fakeNet; - }, - 'should listen for log messages on localhost:5000': function(net) { - assert.equal(net.port, 5000); - assert.equal(net.host, 'localhost'); - } - } -}).addBatch({ - 'configure': { - topic: function() { - var results = {} - , fakeNet = makeFakeNet() - , appender = sandbox.require( - '../lib/appenders/multiprocess', - { - requires: { - 'net': fakeNet, - '../log4js': { - loadAppender: function(app) { - results.appenderLoaded = app; - }, - appenderMakers: { - 'madeupappender': function(config, options) { - results.config = config; - results.options = options; - } - } - } - } - } - ).configure( - { - mode: 'master', - appender: { - type: 'madeupappender', - cheese: 'gouda' - } - }, - { crackers: 'jacobs' } - ); - - return results; - - }, - 'should load underlying appender for master': function(results) { - assert.equal(results.appenderLoaded, 'madeupappender'); - }, - 'should pass config to underlying appender': function(results) { - assert.equal(results.config.cheese, 'gouda'); - }, - 'should pass options to underlying appender': function(results) { - assert.equal(results.options.crackers, 'jacobs'); - } - } -}).exportTo(module); From 50a8164b4b348f766350a1de864fe95a6e1e0768 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sat, 24 Aug 2013 20:46:36 +1000 Subject: [PATCH 25/53] keeping track of changes in the new version --- 0.7-changes | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 0.7-changes diff --git a/0.7-changes b/0.7-changes new file mode 100644 index 00000000..4be73d3b --- /dev/null +++ b/0.7-changes @@ -0,0 +1,10 @@ +LogEvent.categoryName -> LogEvent.category +Logger is immutable (no setLevel any more) +Log levels defined in configure call, nowhere else +References to Loggers not retained +Clustered appender, multiprocess 
appender removed - core handles clusters now +Default category needs to be defined, with appender +connect logger, gelf, smtp, hookio appenders removed from core. +reload configuration removed from core - use 'watchr' or something instead +appenders now only need to provide configure function +log4js.configure now only takes single argument (no options) From 3b4a30587a06f9fe1c989623ecd5c91e5a1df4a5 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sat, 24 Aug 2013 20:49:08 +1000 Subject: [PATCH 26/53] removed unneeded tests --- test/logging-test.js | 144 ------------- test/reloadConfiguration-test.js | 340 ------------------------------- 2 files changed, 484 deletions(-) delete mode 100644 test/logging-test.js delete mode 100644 test/reloadConfiguration-test.js diff --git a/test/logging-test.js b/test/logging-test.js deleted file mode 100644 index 382ec0f3..00000000 --- a/test/logging-test.js +++ /dev/null @@ -1,144 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, sandbox = require('sandboxed-module'); - -function setupConsoleTest() { - var fakeConsole = {} - , logEvents = [] - , log4js; - - ['trace','debug','log','info','warn','error'].forEach(function(fn) { - fakeConsole[fn] = function() { - throw new Error("this should not be called."); - }; - }); - - log4js = sandbox.require( - '../lib/log4js', - { - globals: { - console: fakeConsole - } - } - ); - - log4js.clearAppenders(); - log4js.addAppender(function(evt) { - logEvents.push(evt); - }); - - return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole }; -} - -vows.describe('log4js').addBatch({ - - 'console' : { - topic: setupConsoleTest, - - 'when replaceConsole called': { - topic: function(test) { - test.log4js.replaceConsole(); - - test.fakeConsole.log("Some debug message someone put in a module"); - test.fakeConsole.debug("Some debug"); - test.fakeConsole.error("An error"); - test.fakeConsole.info("some info"); - test.fakeConsole.warn("a warning"); - - test.fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis"); - test.fakeConsole.log({ lumpy: "tapioca" }); - test.fakeConsole.log("count %d", 123); - test.fakeConsole.log("stringify %j", { lumpy: "tapioca" }); - - return test.logEvents; - }, - - 'should replace console.log methods with log4js ones': function(logEvents) { - assert.equal(logEvents.length, 9); - assert.equal(logEvents[0].data[0], "Some debug message someone put in a module"); - assert.equal(logEvents[0].level.toString(), "INFO"); - assert.equal(logEvents[1].data[0], "Some debug"); - assert.equal(logEvents[1].level.toString(), "DEBUG"); - assert.equal(logEvents[2].data[0], "An error"); - assert.equal(logEvents[2].level.toString(), "ERROR"); - assert.equal(logEvents[3].data[0], "some info"); - assert.equal(logEvents[3].level.toString(), "INFO"); - assert.equal(logEvents[4].data[0], "a warning"); - assert.equal(logEvents[4].level.toString(), "WARN"); - assert.equal(logEvents[5].data[0], "cheese (%s) and biscuits (%s)"); - assert.equal(logEvents[5].data[1], "gouda"); - assert.equal(logEvents[5].data[2], "garibaldis"); - } - }, - 'when turned off': { - topic: function(test) { - test.log4js.restoreConsole(); - try { - test.fakeConsole.log("This should cause the error described in the setup"); - } catch (e) { - return e; - } - }, - 'should call the original console methods': function (err) { - assert.instanceOf(err, Error); - assert.equal(err.message, "this should not be called."); - } - } - }, - 'console configuration': { - topic: setupConsoleTest, - 'when 
disabled': { - topic: function(test) { - test.log4js.replaceConsole(); - test.log4js.configure({ replaceConsole: false }); - try { - test.fakeConsole.log("This should cause the error described in the setup"); - } catch (e) { - return e; - } - }, - 'should allow for turning off console replacement': function (err) { - assert.instanceOf(err, Error); - assert.equal(err.message, 'this should not be called.'); - } - }, - 'when enabled': { - topic: function(test) { - test.log4js.restoreConsole(); - test.log4js.configure({ replaceConsole: true }); - //log4js.configure clears all appenders - test.log4js.addAppender(function(evt) { - test.logEvents.push(evt); - }); - - test.fakeConsole.debug("Some debug"); - return test.logEvents; - }, - - 'should allow for turning on console replacement': function (logEvents) { - assert.equal(logEvents.length, 1); - assert.equal(logEvents[0].level.toString(), "DEBUG"); - assert.equal(logEvents[0].data[0], "Some debug"); - } - } - }, - 'configuration persistence' : { - topic: function() { - var logEvent, - firstLog4js = require('../lib/log4js'), - secondLog4js; - - firstLog4js.clearAppenders(); - firstLog4js.addAppender(function(evt) { logEvent = evt; }); - - secondLog4js = require('../lib/log4js'); - secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js"); - - return logEvent; - }, - 'should maintain appenders between requires': function (logEvent) { - assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js"); - } - } -}).export(module); diff --git a/test/reloadConfiguration-test.js b/test/reloadConfiguration-test.js deleted file mode 100644 index 060f0895..00000000 --- a/test/reloadConfiguration-test.js +++ /dev/null @@ -1,340 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, sandbox = require('sandboxed-module'); - -function setupConsoleTest() { - var fakeConsole = {} - , logEvents = [] - , log4js; - - ['trace','debug','log','info','warn','error'].forEach(function(fn) { - fakeConsole[fn] = function() { - throw new Error("this should not be called."); - }; - }); - - log4js = sandbox.require( - '../lib/log4js', - { - globals: { - console: fakeConsole - } - } - ); - - log4js.clearAppenders(); - log4js.addAppender(function(evt) { - logEvents.push(evt); - }); - - return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole }; -} - -vows.describe('reload configuration').addBatch({ - 'with config file changing' : { - topic: function() { - var pathsChecked = [], - logEvents = [], - logger, - modulePath = 'path/to/log4js.json', - fakeFS = { - lastMtime: Date.now(), - config: { - appenders: [ - { type: 'console', layout: { type: 'messagePassThrough' } } - ], - levels: { 'a-test' : 'INFO' } - }, - readFileSync: function (file, encoding) { - assert.equal(file, modulePath); - assert.equal(encoding, 'utf8'); - return JSON.stringify(fakeFS.config); - }, - statSync: function (path) { - pathsChecked.push(path); - if (path === modulePath) { - fakeFS.lastMtime += 1; - return { mtime: new Date(fakeFS.lastMtime) }; - } else { - throw new Error("no such file"); - } - } - }, - fakeConsole = { - 'name': 'console', - 'appender': function () { - return function(evt) { logEvents.push(evt); }; - }, - 'configure': function (config) { - return fakeConsole.appender(); - } - }, - setIntervalCallback, - fakeSetInterval = function(cb, timeout) { - setIntervalCallback = cb; - }, - log4js = sandbox.require( - '../lib/log4js', - { - requires: { - 'fs': fakeFS, - './appenders/console': 
fakeConsole - }, - globals: { - 'console': fakeConsole, - 'setInterval' : fakeSetInterval, - } - } - ); - - log4js.configure('path/to/log4js.json', { reloadSecs: 30 }); - logger = log4js.getLogger('a-test'); - logger.info("info1"); - logger.debug("debug2 - should be ignored"); - fakeFS.config.levels['a-test'] = "DEBUG"; - setIntervalCallback(); - logger.info("info3"); - logger.debug("debug4"); - - return logEvents; - }, - 'should configure log4js from first log4js.json found': function(logEvents) { - assert.equal(logEvents[0].data[0], 'info1'); - assert.equal(logEvents[1].data[0], 'info3'); - assert.equal(logEvents[2].data[0], 'debug4'); - assert.equal(logEvents.length, 3); - } - }, - - 'with config file staying the same' : { - topic: function() { - var pathsChecked = [], - fileRead = 0, - logEvents = [], - logger, - modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'), - mtime = new Date(), - fakeFS = { - config: { - appenders: [ - { type: 'console', layout: { type: 'messagePassThrough' } } - ], - levels: { 'a-test' : 'INFO' } - }, - readFileSync: function (file, encoding) { - fileRead += 1; - assert.isString(file); - assert.equal(file, modulePath); - assert.equal(encoding, 'utf8'); - return JSON.stringify(fakeFS.config); - }, - statSync: function (path) { - pathsChecked.push(path); - if (path === modulePath) { - return { mtime: mtime }; - } else { - throw new Error("no such file"); - } - } - }, - fakeConsole = { - 'name': 'console', - 'appender': function () { - return function(evt) { logEvents.push(evt); }; - }, - 'configure': function (config) { - return fakeConsole.appender(); - } - }, - setIntervalCallback, - fakeSetInterval = function(cb, timeout) { - setIntervalCallback = cb; - }, - log4js = sandbox.require( - '../lib/log4js', - { - requires: { - 'fs': fakeFS, - './appenders/console': fakeConsole - }, - globals: { - 'console': fakeConsole, - 'setInterval' : fakeSetInterval, - } - } - ); - - log4js.configure(modulePath, { reloadSecs: 3 }); - logger = log4js.getLogger('a-test'); - logger.info("info1"); - logger.debug("debug2 - should be ignored"); - setIntervalCallback(); - logger.info("info3"); - logger.debug("debug4"); - - return [ pathsChecked, logEvents, modulePath, fileRead ]; - }, - 'should only read the configuration file once': function(args) { - var fileRead = args[3]; - assert.equal(fileRead, 1); - }, - 'should configure log4js from first log4js.json found': function(args) { - var logEvents = args[1]; - assert.equal(logEvents.length, 2); - assert.equal(logEvents[0].data[0], 'info1'); - assert.equal(logEvents[1].data[0], 'info3'); - } - }, - - 'when config file is removed': { - topic: function() { - var pathsChecked = [], - fileRead = 0, - logEvents = [], - logger, - modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'), - mtime = new Date(), - fakeFS = { - config: { - appenders: [ - { type: 'console', layout: { type: 'messagePassThrough' } } - ], - levels: { 'a-test' : 'INFO' } - }, - readFileSync: function (file, encoding) { - fileRead += 1; - assert.isString(file); - assert.equal(file, modulePath); - assert.equal(encoding, 'utf8'); - return JSON.stringify(fakeFS.config); - }, - statSync: function (path) { - this.statSync = function() { - throw new Error("no such file"); - }; - return { mtime: new Date() }; - } - }, - fakeConsole = { - 'name': 'console', - 'appender': function () { - return function(evt) { logEvents.push(evt); }; - }, - 'configure': function (config) { - return fakeConsole.appender(); - } - }, - setIntervalCallback, 
- fakeSetInterval = function(cb, timeout) { - setIntervalCallback = cb; - }, - log4js = sandbox.require( - '../lib/log4js', - { - requires: { - 'fs': fakeFS, - './appenders/console': fakeConsole - }, - globals: { - 'console': fakeConsole, - 'setInterval' : fakeSetInterval, - } - } - ); - - log4js.configure(modulePath, { reloadSecs: 3 }); - logger = log4js.getLogger('a-test'); - logger.info("info1"); - logger.debug("debug2 - should be ignored"); - setIntervalCallback(); - logger.info("info3"); - logger.debug("debug4"); - - return [ pathsChecked, logEvents, modulePath, fileRead ]; - }, - 'should only read the configuration file once': function(args) { - var fileRead = args[3]; - assert.equal(fileRead, 1); - }, - 'should not clear configuration when config file not found': function(args) { - var logEvents = args[1]; - assert.equal(logEvents.length, 3); - assert.equal(logEvents[0].data[0], 'info1'); - assert.equal(logEvents[1].level.toString(), 'WARN'); - assert.include(logEvents[1].data[0], 'Failed to load configuration file'); - assert.equal(logEvents[2].data[0], 'info3'); - } - }, - - 'when passed an object': { - topic: function() { - var test = setupConsoleTest(); - test.log4js.configure({}, { reloadSecs: 30 }); - return test.logEvents; - }, - 'should log a warning': function(events) { - assert.equal(events[0].level.toString(), 'WARN'); - assert.equal( - events[0].data[0], - 'Ignoring configuration reload parameter for "object" configuration.' - ); - } - }, - - 'when called twice with reload options': { - topic: function() { - var modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'), - fakeFS = { - readFileSync: function (file, encoding) { - return JSON.stringify({}); - }, - statSync: function (path) { - return { mtime: new Date() }; - } - }, - fakeConsole = { - 'name': 'console', - 'appender': function () { - return function(evt) { }; - }, - 'configure': function (config) { - return fakeConsole.appender(); - } - }, - setIntervalCallback, - intervalCleared = false, - clearedId, - fakeSetInterval = function(cb, timeout) { - setIntervalCallback = cb; - return 1234; - }, - log4js = sandbox.require( - '../lib/log4js', - { - requires: { - 'fs': fakeFS, - './appenders/console': fakeConsole - }, - globals: { - 'console': fakeConsole, - 'setInterval' : fakeSetInterval, - 'clearInterval': function(interval) { - intervalCleared = true; - clearedId = interval; - } - } - } - ); - - log4js.configure(modulePath, { reloadSecs: 3 }); - log4js.configure(modulePath, { reloadSecs: 15 }); - - return { cleared: intervalCleared, id: clearedId }; - }, - 'should clear the previous interval': function(result) { - assert.isTrue(result.cleared); - assert.equal(result.id, 1234); - } - } -}).exportTo(module); From 5a2771cfed9b946faf5879f6f411d8da8b074680 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sat, 24 Aug 2013 20:51:25 +1000 Subject: [PATCH 27/53] moved nolog test to log4js-connect --- test/nolog-test.js | 261 --------------------------------------------- 1 file changed, 261 deletions(-) delete mode 100644 test/nolog-test.js diff --git a/test/nolog-test.js b/test/nolog-test.js deleted file mode 100644 index 3c1a8e78..00000000 --- a/test/nolog-test.js +++ /dev/null @@ -1,261 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, levels = require('../lib/levels'); - -function MockLogger() { - - var that = this; - this.messages = []; - - this.log = function(level, message, exception) { - that.messages.push({ level: level, message: message }); - }; - - 
this.isLevelEnabled = function(level) { - return level.isGreaterThanOrEqualTo(that.level); - }; - - this.level = levels.TRACE; - -} - -function MockRequest(remoteAddr, method, originalUrl) { - - this.socket = { remoteAddress: remoteAddr }; - this.originalUrl = originalUrl; - this.method = method; - this.httpVersionMajor = '5'; - this.httpVersionMinor = '0'; - this.headers = {}; -} - -function MockResponse(statusCode) { - - this.statusCode = statusCode; - - this.end = function(chunk, encoding) { - - }; -} - -vows.describe('log4js connect logger').addBatch({ - 'getConnectLoggerModule': { - topic: function() { - var clm = require('../lib/connect-logger'); - return clm; - }, - - 'should return a "connect logger" factory' : function(clm) { - assert.isObject(clm); - }, - - 'nolog String' : { - topic: function(clm) { - var ml = new MockLogger(); - var cl = clm.connectLogger(ml, { nolog: "\\.gif" }); - return {cl: cl, ml: ml}; - }, - - 'check unmatch url request': { - topic: function(d){ - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages){ - assert.isArray(messages); - assert.equal(messages.length, 1); - assert.ok(levels.INFO.isEqualTo(messages[0].level)); - assert.include(messages[0].message, 'GET'); - assert.include(messages[0].message, 'http://url'); - assert.include(messages[0].message, 'my.remote.addr'); - assert.include(messages[0].message, '200'); - messages.pop(); - } - }, - - 'check match url request': { - topic: function(d) { - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages) { - assert.isArray(messages); - assert.equal(messages.length, 0); - } - } - }, - - 'nolog Strings' : { - topic: function(clm) { - var ml = new MockLogger(); - var cl = clm.connectLogger(ml, {nolog: "\\.gif|\\.jpe?g"}); - return {cl: cl, ml: ml}; - }, - - 'check unmatch url request (png)': { - topic: function(d){ - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages){ - assert.isArray(messages); - assert.equal(messages.length, 1); - assert.ok(levels.INFO.isEqualTo(messages[0].level)); - assert.include(messages[0].message, 'GET'); - assert.include(messages[0].message, 'http://url'); - assert.include(messages[0].message, 'my.remote.addr'); - assert.include(messages[0].message, '200'); - messages.pop(); - } - }, - - 'check match url request (gif)': { - topic: function(d) { - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages) { - assert.isArray(messages); - assert.equal(messages.length, 0); - } - }, - 'check match url request (jpeg)': { - topic: function(d) { - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages) { - 
assert.isArray(messages); - assert.equal(messages.length, 0); - } - } - }, - 'nolog Array' : { - topic: function(clm) { - var ml = new MockLogger(); - var cl = clm.connectLogger(ml, {nolog: ["\\.gif", "\\.jpe?g"]}); - return {cl: cl, ml: ml}; - }, - - 'check unmatch url request (png)': { - topic: function(d){ - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages){ - assert.isArray(messages); - assert.equal(messages.length, 1); - assert.ok(levels.INFO.isEqualTo(messages[0].level)); - assert.include(messages[0].message, 'GET'); - assert.include(messages[0].message, 'http://url'); - assert.include(messages[0].message, 'my.remote.addr'); - assert.include(messages[0].message, '200'); - messages.pop(); - } - }, - - 'check match url request (gif)': { - topic: function(d) { - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages) { - assert.isArray(messages); - assert.equal(messages.length, 0); - } - }, - - 'check match url request (jpeg)': { - topic: function(d) { - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages) { - assert.isArray(messages); - assert.equal(messages.length, 0); - } - }, - }, - 'nolog RegExp' : { - topic: function(clm) { - var ml = new MockLogger(); - var cl = clm.connectLogger(ml, {nolog: /\.gif|\.jpe?g/}); - return {cl: cl, ml: ml}; - }, - - 'check unmatch url request (png)': { - topic: function(d){ - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages){ - assert.isArray(messages); - assert.equal(messages.length, 1); - assert.ok(levels.INFO.isEqualTo(messages[0].level)); - assert.include(messages[0].message, 'GET'); - assert.include(messages[0].message, 'http://url'); - assert.include(messages[0].message, 'my.remote.addr'); - assert.include(messages[0].message, '200'); - messages.pop(); - } - }, - - 'check match url request (gif)': { - topic: function(d) { - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages) { - assert.isArray(messages); - assert.equal(messages.length, 0); - } - }, - - 'check match url request (jpeg)': { - topic: function(d) { - var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif - var res = new MockResponse(200); - d.cl(req, res, function() { }); - res.end('chunk', 'encoding'); - return d.ml.messages; - }, - 'check message': function(messages) { - assert.isArray(messages); - assert.equal(messages.length, 0); - } - } - } - } - -}).export(module); From 50074842ad516a00c3d4095ab5edd00fc8487568 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 25 Aug 2013 11:55:26 +1000 Subject: [PATCH 28/53] replaced my debug lib with standard one --- lib/debug.js | 
15 ------ lib/log4js.js | 23 +++++---- lib/logger.js | 7 ++- lib/streams/BaseRollingFileStream.js | 2 +- lib/streams/DateRollingFileStream.js | 12 ++--- lib/streams/RollingFileStream.js | 6 +-- package.json | 3 +- test/debug-test.js | 72 ---------------------------- test/log4js-test.js | 3 +- 9 files changed, 31 insertions(+), 112 deletions(-) delete mode 100644 lib/debug.js delete mode 100644 test/debug-test.js diff --git a/lib/debug.js b/lib/debug.js deleted file mode 100644 index e3e65816..00000000 --- a/lib/debug.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; - -module.exports = function(label) { - var debug; - - if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) { - debug = function(message) { - console.error('LOG4JS: (%s) %s', label, message); - }; - } else { - debug = function() { }; - } - - return debug; -}; diff --git a/lib/log4js.js b/lib/log4js.js index b7525c7c..511a54c9 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -46,7 +46,7 @@ * @static * Website: http://log4js.berlios.de */ -var debug = require('./debug')('core') +var debug = require('debug')('log4js:core') , fs = require('fs') , cluster = require('cluster') , util = require('util') @@ -90,10 +90,10 @@ function deserialise(serialised) { //in a multi-process node environment, worker loggers will use //process.send cluster.on('fork', function(worker) { - debug('listening to worker: ' + worker); + debug('listening to worker: ', worker); worker.on('message', function(message) { if (message.type && message.type === '::log4js-message') { - debug("received message: " + message.event); + debug("received message: ", message.event); dispatch(deserialise(message.event)); } }); @@ -106,7 +106,7 @@ cluster.on('fork', function(worker) { * @static */ function getLogger (category) { - debug("getLogger(" + category + ")"); + debug("getLogger(", category, ")"); return new Logger( cluster.isMaster ? dispatch : workerDispatch, @@ -123,9 +123,9 @@ function workerDispatch(event) { * This would be a good place to implement category hierarchies/wildcards, etc */ function dispatch(event) { - debug("event is " + util.inspect(event)); + debug("event is ", event); var category = categories[event.category] || categories.default; - debug("category.level[" + category.level + "] <= " + event.level + " ? " + category.level.isLessThanOrEqualTo(event.level)); + debug("category.level[", category.level, "] <= ", event.level, " ? 
", category.level.isLessThanOrEqualTo(event.level)); if (category.level.isLessThanOrEqualTo(event.level)) { category.appenders.forEach(function(appender) { @@ -135,17 +135,24 @@ function dispatch(event) { } function load(file) { + debug("loading ", file); return JSON.parse(fs.readFileSync(file, "utf-8")); } function configure(configurationFileOrObject) { - var filename, config = configurationFileOrObject || process.env.LOG4JS_CONFIG; - + debug("configure(", configurationFileOrObject, ")"); + debug("process.env.LOG4JS_CONFIG = ", process.env.LOG4JS_CONFIG); + + var filename, config = process.env.LOG4JS_CONFIG || configurationFileOrObject; + + debug("config ", config); + if (!config || !(typeof config === 'string' || typeof config === 'object')) { throw new Error("You must specify configuration as an object or a filename."); } if (typeof config === 'string') { + debug("config is string"); filename = config; config = load(filename); } diff --git a/lib/logger.js b/lib/logger.js index 06db5845..6c138d8b 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -1,7 +1,6 @@ "use strict"; -var debug = require('./debug')('logger') -, levels = require('./levels') -, util = require('util'); +var debug = require('debug')('log4js:logger') +, levels = require('./levels'); module.exports = function Logger(dispatch, category) { if (typeof dispatch !== 'function') { @@ -16,7 +15,7 @@ module.exports = function Logger(dispatch, category) { var args = Array.prototype.slice.call(arguments) , logLevel = args.shift() , loggingEvent = new LoggingEvent(category, logLevel, args); - debug("Logging event " + loggingEvent + " to dispatch = " + util.inspect(dispatch)); + debug("Logging event ", loggingEvent, " to dispatch = ", dispatch); dispatch(loggingEvent); } diff --git a/lib/streams/BaseRollingFileStream.js b/lib/streams/BaseRollingFileStream.js index 5f036159..a9e4f14c 100644 --- a/lib/streams/BaseRollingFileStream.js +++ b/lib/streams/BaseRollingFileStream.js @@ -1,7 +1,7 @@ "use strict"; var fs = require('fs') , stream -, debug = require('../debug')('BaseRollingFileStream') +, debug = require('debug')('log4js:BaseRollingFileStream') , util = require('util') , semver = require('semver'); diff --git a/lib/streams/DateRollingFileStream.js b/lib/streams/DateRollingFileStream.js index 9da029a8..fc6dd936 100644 --- a/lib/streams/DateRollingFileStream.js +++ b/lib/streams/DateRollingFileStream.js @@ -1,6 +1,6 @@ "use strict"; var BaseRollingFileStream = require('./BaseRollingFileStream') -, debug = require('../debug')('DateRollingFileStream') +, debug = require('debug')('log4js:DateRollingFileStream') , format = require('../date_format') , async = require('async') , fs = require('fs') @@ -9,7 +9,7 @@ var BaseRollingFileStream = require('./BaseRollingFileStream') module.exports = DateRollingFileStream; function DateRollingFileStream(filename, pattern, options, now) { - debug("Now is " + now); + debug("Now is ", now); if (pattern && typeof(pattern) === 'object') { now = options; options = pattern; @@ -31,7 +31,7 @@ function DateRollingFileStream(filename, pattern, options, now) { options = null; } } - debug("this.now is " + this.now + ", now is " + now); + debug("this.now is ", this.now, ", now is ", now); DateRollingFileStream.super_.call(this, filename, options); } @@ -41,8 +41,8 @@ DateRollingFileStream.prototype.shouldRoll = function() { var lastTime = this.lastTimeWeWroteSomething, thisTime = format.asString(this.pattern, new Date(this.now())); - debug("DateRollingFileStream.shouldRoll with now = " + - this.now() + ", 
thisTime = " + thisTime + ", lastTime = " + lastTime); + debug("DateRollingFileStream.shouldRoll with now = ", + this.now(), ", thisTime = ", thisTime, ", lastTime = ", lastTime); this.lastTimeWeWroteSomething = thisTime; this.previousTime = lastTime; @@ -81,7 +81,7 @@ DateRollingFileStream.prototype.roll = function(filename, callback) { } function renameTheCurrentFile(cb) { - debug("Renaming the " + filename + " -> " + newFilename); + debug("Renaming the ", filename, " -> ", newFilename); fs.rename(filename, newFilename, cb); } diff --git a/lib/streams/RollingFileStream.js b/lib/streams/RollingFileStream.js index 64a0725a..26d2bb17 100644 --- a/lib/streams/RollingFileStream.js +++ b/lib/streams/RollingFileStream.js @@ -1,6 +1,6 @@ "use strict"; var BaseRollingFileStream = require('./BaseRollingFileStream') -, debug = require('../debug')('RollingFileStream') +, debug = require('debug')('log4js:RollingFileStream') , util = require('util') , path = require('path') , fs = require('fs') @@ -53,13 +53,13 @@ RollingFileStream.prototype.roll = function(filename, callback) { function increaseFileIndex (fileToRename, cb) { var idx = index(fileToRename); - debug('Index of ' + fileToRename + ' is ' + idx); + debug('Index of ', fileToRename, ' is ', idx); if (idx < that.backups) { //on windows, you can get a EEXIST error if you rename a file to an existing file //so, we'll try to delete the file we're renaming to first fs.unlink(filename + '.' + (idx+1), function (err) { //ignore err: if we could not delete, it's most likely that it doesn't exist - debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1)); + debug('Renaming ', fileToRename, ' -> ', filename, '.', (idx+1)); fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb); }); } else { diff --git a/package.json b/package.json index e103052b..6867d0d5 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,8 @@ "dependencies": { "async": "0.1.15", "semver": "~1.1.4", - "readable-stream": "~1.0.2" + "readable-stream": "~1.0.2", + "debug": "~0.7.2" }, "devDependencies": { "sandboxed-module": "0.1.3", diff --git a/test/debug-test.js b/test/debug-test.js deleted file mode 100644 index 92dd915b..00000000 --- a/test/debug-test.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, sandbox = require('sandboxed-module') -, fakeConsole = { - error: function(format, label, message) { - this.logged = [ format, label, message ]; - } -} -, globals = function(debugValue) { - return { - process: { - env: { - 'NODE_DEBUG': debugValue - } - }, - console: fakeConsole - }; -}; - -vows.describe('../lib/debug').addBatch({ - 'when NODE_DEBUG is set to log4js': { - topic: function() { - var debug = sandbox.require( - '../lib/debug', - { 'globals': globals('log4js') } - ); - - fakeConsole.logged = []; - debug('cheese')('biscuits'); - return fakeConsole.logged; - }, - 'it should log to console.error': function(logged) { - assert.equal(logged[0], 'LOG4JS: (%s) %s'); - assert.equal(logged[1], 'cheese'); - assert.equal(logged[2], 'biscuits'); - } - }, - - 'when NODE_DEBUG is set to not log4js': { - topic: function() { - var debug = sandbox.require( - '../lib/debug', - { globals: globals('other_module') } - ); - - fakeConsole.logged = []; - debug('cheese')('biscuits'); - return fakeConsole.logged; - }, - 'it should not log to console.error': function(logged) { - assert.equal(logged.length, 0); - } - }, - - 'when NODE_DEBUG is not set': { - topic: function() { - var debug = 
sandbox.require( - '../lib/debug', - { globals: globals(null) } - ); - - fakeConsole.logged = []; - debug('cheese')('biscuits'); - return fakeConsole.logged; - }, - 'it should not log to console.error': function(logged) { - assert.equal(logged.length, 0); - } - } - -}).exportTo(module); diff --git a/test/log4js-test.js b/test/log4js-test.js index 6c721e90..fe1658fc 100644 --- a/test/log4js-test.js +++ b/test/log4js-test.js @@ -39,7 +39,7 @@ describe('../lib/log4js', function() { (function() { log4js.configure(); }).should.throw( "ENOENT, no such file or directory 'made-up-file'" ); - process.env.LOG4JS_CONFIG = null; + delete process.env.LOG4JS_CONFIG; }); it('should complain if the config does not specify any appenders', function() { @@ -286,7 +286,6 @@ describe('../lib/log4js', function() { }); - it('should reload configuration if specified'); }); describe('with no configuration', function() { From ac4fd2a7fc4427f3e4bad6f3cd421b556666b7d3 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 25 Aug 2013 12:04:49 +1000 Subject: [PATCH 29/53] migrated tests to mocha --- lib/appenders/console.js | 1 - test/consoleAppender-test.js | 50 +++++++++++++++++------------------- 2 files changed, 24 insertions(+), 27 deletions(-) diff --git a/lib/appenders/console.js b/lib/appenders/console.js index 7a470a34..e0e88879 100644 --- a/lib/appenders/console.js +++ b/lib/appenders/console.js @@ -17,5 +17,4 @@ function configure(config) { return consoleAppender(layout); } -exports.appender = consoleAppender; exports.configure = configure; diff --git a/test/consoleAppender-test.js b/test/consoleAppender-test.js index 3887ce5a..c36e5094 100644 --- a/test/consoleAppender-test.js +++ b/test/consoleAppender-test.js @@ -1,33 +1,31 @@ "use strict"; -var assert = require('assert') -, vows = require('vows') -, layouts = require('../lib/layouts') +var should = require('should') , sandbox = require('sandboxed-module'); -vows.describe('../lib/appenders/console').addBatch({ - 'appender': { - topic: function() { - var messages = [] - , fakeConsole = { - log: function(msg) { messages.push(msg); } - } - , appenderModule = sandbox.require( - '../lib/appenders/console', - { - globals: { - 'console': fakeConsole - } +describe('../lib/appenders/console', function() { + var messages = []; + + before(function() { + var fakeConsole = { + log: function(msg) { messages.push(msg); } + } + , appenderModule = sandbox.require( + '../lib/appenders/console', + { + globals: { + 'console': fakeConsole } - ) - , appender = appenderModule.appender(layouts.messagePassThroughLayout); + } + ) + , appender = appenderModule.configure( + { layout: { type: "messagePassThrough" } } + ); - appender({ data: ["blah"] }); - return messages; - }, + appender({ data: ["blah"] }); + }); - 'should output to console': function(messages) { - assert.equal(messages[0], 'blah'); - } - } + it('should output to console', function() { + messages.should.eql(["blah"]); + }); -}).exportTo(module); +}); From 04d0113224f9efac318c555c1dceccf0d0d7f8e8 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 25 Aug 2013 12:05:02 +1000 Subject: [PATCH 30/53] updated changelog --- 0.7-changes | 2 ++ 1 file changed, 2 insertions(+) diff --git a/0.7-changes b/0.7-changes index 4be73d3b..60a3ccc4 100644 --- a/0.7-changes +++ b/0.7-changes @@ -8,3 +8,5 @@ connect logger, gelf, smtp, hookio appenders removed from core. 
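The console appender migration above, together with the 0.7 change note below that appenders now only need to provide a configure function, describes the new appender contract: a module exports configure(config) and returns the logging function directly, rather than exporting a separate appender() factory. The following is only a rough sketch of a custom appender written against that contract; the module path, the stdout destination, and the assumption that config.layout is always supplied are illustrative and not part of this patch series.

"use strict";
// sketch of a 0.7-style appender: only configure() is exported
var layouts = require('../layouts');

function stdoutAppender(layout) {
  // the returned function is what log4js calls with each logging event
  return function(loggingEvent) {
    process.stdout.write(layout(loggingEvent) + '\n');
  };
}

function configure(config) {
  // assumes config.layout is always supplied; a real appender would fall back to a default layout
  var layout = layouts.layout(config.layout.type, config.layout);
  return stdoutAppender(layout);
}

exports.configure = configure;

Such a module would then be referenced from the appenders map of the configuration object in the same way as the built-in appenders are in the tests below, for example { type: "stdout", layout: { type: "messagePassThrough" } }, assuming that type name resolves to this module.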
reload configuration removed from core - use 'watchr' or something instead appenders now only need to provide configure function log4js.configure now only takes single argument (no options) +tests use mocha not vows +replaced my debug lib with tjholowaychuk's debug (more of a standard) From b2569c6d9da90b81b14fb307cf84e9db8b9dd146 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 26 Aug 2013 07:58:45 +1000 Subject: [PATCH 31/53] removed log-abspath --- 0.7-changes | 1 + test/log-abspath-test.js | 75 ---------------------------------------- 2 files changed, 1 insertion(+), 75 deletions(-) delete mode 100644 test/log-abspath-test.js diff --git a/0.7-changes b/0.7-changes index 60a3ccc4..973de67a 100644 --- a/0.7-changes +++ b/0.7-changes @@ -10,3 +10,4 @@ appenders now only need to provide configure function log4js.configure now only takes single argument (no options) tests use mocha not vows replaced my debug lib with tjholowaychuk's debug (more of a standard) +options.cwd removed - filenames should always be specified in full, not relative \ No newline at end of file diff --git a/test/log-abspath-test.js b/test/log-abspath-test.js deleted file mode 100644 index 20aa9def..00000000 --- a/test/log-abspath-test.js +++ /dev/null @@ -1,75 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, sandbox = require('sandboxed-module'); - -vows.describe('log4js-abspath').addBatch({ - 'options': { - topic: function() { - var appenderOptions, - log4js = sandbox.require( - '../lib/log4js', - { requires: - { './appenders/fake': - { name: "fake", - appender: function() {}, - configure: function(configuration, options) { - appenderOptions = options; - return function() {}; - } - } - } - } - ), - config = { - "appenders": [ - { - "type" : "fake", - "filename" : "cheesy-wotsits.log" - } - ] - }; - - log4js.configure(config, { - cwd: '/absolute/path/to' - }); - return appenderOptions; - }, - 'should be passed to appenders during configuration': function(options) { - assert.equal(options.cwd, '/absolute/path/to'); - } - }, - - 'file appender': { - topic: function() { - var fileOpened, - fileAppender = sandbox.require( - '../lib/appenders/file', - { requires: - { '../streams': - { RollingFileStream: - function(file) { - fileOpened = file; - return { - on: function() {}, - end: function() {} - }; - } - } - } - } - ); - fileAppender.configure( - { - filename: "whatever.log", - maxLogSize: 10 - }, - { cwd: '/absolute/path/to' } - ); - return fileOpened; - }, - 'should prepend options.cwd to config.filename': function(fileOpened) { - assert.equal(fileOpened, "/absolute/path/to/whatever.log"); - } - }, -}).export(module); From be1a9ca411925969edec45340d761bffea6e2033 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 26 Aug 2013 15:38:40 +1000 Subject: [PATCH 32/53] migrated logLevelFilter tests to mocha, changed filter api slightly --- 0.7-changes | 4 +- lib/appenders/logLevelFilter.js | 35 +++++++-- lib/log4js.js | 10 ++- test/logLevelFilter-test.js | 129 +++++++++++++++++++++++++++++--- 4 files changed, 158 insertions(+), 20 deletions(-) diff --git a/0.7-changes b/0.7-changes index 973de67a..33d8aabc 100644 --- a/0.7-changes +++ b/0.7-changes @@ -10,4 +10,6 @@ appenders now only need to provide configure function log4js.configure now only takes single argument (no options) tests use mocha not vows replaced my debug lib with tjholowaychuk's debug (more of a standard) -options.cwd removed - filenames should always be specified in full, not relative \ No newline at end of file 
+options.cwd removed - filenames should always be specified in full, not relative +loglevelfilter changed to accept a list of log levels it allows +appenders that wrap other appenders must reference them by name diff --git a/lib/appenders/logLevelFilter.js b/lib/appenders/logLevelFilter.js index ddbb61cf..2aa89cb5 100644 --- a/lib/appenders/logLevelFilter.js +++ b/lib/appenders/logLevelFilter.js @@ -1,21 +1,40 @@ "use strict"; var levels = require('../levels') +, debug = require('debug')('log4js:logLevelFilter') , log4js = require('../log4js'); -function logLevelFilter (levelString, appender) { - var level = levels.toLevel(levelString); +function logLevelFilter(allowedLevels, appender) { return function(logEvent) { - if (logEvent.level.isGreaterThanOrEqualTo(level)) { + debug("Checking ", logEvent.level, " against ", allowedLevels); + if (allowedLevels.some(function(item) { return item.level === logEvent.level.level; })) { + debug("Sending ", logEvent, " to appender ", appender); appender(logEvent); } }; } -function configure(config) { - log4js.loadAppender(config.appender.type); - var appender = log4js.appenderMakers[config.appender.type](config.appender); - return logLevelFilter(config.level, appender); +function configure(config, appenderByName) { + if (!Array.isArray(config.allow)) { + throw new Error("No allowed log levels specified."); + } + + var allowedLevels = config.allow.map(function(allowed) { + var level = levels.toLevel(allowed); + if (!level) { + throw new Error("Unrecognised log level '" + allowed + "'."); + } + return level; + }); + + if (allowedLevels.length === 0) { + throw new Error("No allowed log levels specified."); + } + + if (!config.appender) { + throw new Error("Missing an appender."); + } + + return logLevelFilter(allowedLevels, appenderByName(config.appender)); } -exports.appender = logLevelFilter; exports.configure = configure; diff --git a/lib/log4js.js b/lib/log4js.js index 511a54c9..109fe3c2 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -200,6 +200,14 @@ function clearAppenders () { appenders = {}; } +function appenderByName(name) { + if (appenders.hasOwnProperty(name)) { + return appenders[name]; + } else { + throw new Error("Appender '" + name + "' not found."); + } +} + function configureAppenders(appenderMap) { clearAppenders(); Object.keys(appenderMap).forEach(function(appenderName) { @@ -207,7 +215,7 @@ function configureAppenders(appenderMap) { loadAppender(appenderConfig.type); appenderConfig.makers = appenderMakers; try { - appenders[appenderName] = appenderMakers[appenderConfig.type](appenderConfig); + appenders[appenderName] = appenderMakers[appenderConfig.type](appenderConfig, appenderByName); } catch(e) { throw new Error("log4js configuration problem for appender '" + appenderName + "'. 
Error was " + e.stack); } diff --git a/test/logLevelFilter-test.js b/test/logLevelFilter-test.js index c9766894..7eef8cd7 100644 --- a/test/logLevelFilter-test.js +++ b/test/logLevelFilter-test.js @@ -1,16 +1,124 @@ "use strict"; -var vows = require('vows') -, fs = require('fs') -, assert = require('assert'); +var should = require('should') +, sandbox = require('sandboxed-module') +, log4js = require('../lib/log4js'); -function remove(filename) { - try { - fs.unlinkSync(filename); - } catch (e) { - //doesn't really matter if it failed - } -} +describe('log level filter', function() { + describe('when configured correctly', function() { + var events = [], logger; + + before(function() { + var log4js_sandboxed = sandbox.require( + '../lib/log4js', + { requires: + { './appenders/console': + { configure: function() { return function(evt) { events.push(evt); }; } } + } + } + ); + log4js_sandboxed.configure({ + appenders: { + "console": { type: "console", layout: { type: "messagePassThrough" } }, + "errors only": { + type: "logLevelFilter", + allow: [ "ERROR", "FATAL" ], + appender: "console" + } + }, + categories: { + default: { level: "DEBUG", appenders: [ "errors only" ] } + } + }); + logger = log4js_sandboxed.getLogger("test"); + }); + + it('should pass events to an appender if they match', function() { + logger.error("oh no"); + logger.fatal("boom"); + + events.should.have.length(2); + events[0].data[0].should.eql("oh no"); + events[1].data[0].should.eql("boom"); + }); + + it('should not pass events to the appender if they do not match', function() { + events.should.have.length(2); + logger.debug("cheese"); + events.should.have.length(2); + logger.info("yawn"); + events.should.have.length(2); + }); + }); + + it('should complain if it has no appender', function() { + (function() { + log4js.configure({ + appenders: { + "errors": { + type: "logLevelFilter", + allow: [ "ERROR", "FATAL" ] + } + }, + categories: { + default: { level: "DEBUG", appenders: [ "errors" ] } + } + }); + }).should.throw(/Missing an appender\./); + }); + + it('should complain if it has no list of allowed levels', function() { + (function() { + log4js.configure({ + appenders: { + "console": { type: "console" }, + "errors": { + type: "logLevelFilter", + appender: "console" + } + }, + categories: { + default: { level: "DEBUG", appenders: [ "errors" ] } + } + }); + }).should.throw(/No allowed log levels specified\./); + }); + + it('should complain if the referenced appender does not exist', function() { + (function() { + log4js.configure({ + appenders: { + "errors": { + type: "logLevelFilter", + allow: [ "ERROR" ], + appender: "console" + } + }, + categories: { + default: { level: "DEBUG", appenders: [ "errors" ] } + } + }); + }).should.throw(/Appender 'console' not found\./); + }); + + it('should complain if the list of levels is not valid', function() { + (function() { + log4js.configure({ + appenders: { + "errors": { + type: "logLevelFilter", + allow: [ "cheese", "biscuits", "ERROR" ], + appender: { type: "console" } + } + }, + categories: { + default: { level: "DEBUG", appenders: [ "errors" ] } + } + }); + }).should.throw(/Unrecognised log level 'cheese'\./); + }); +}); +/* vows.describe('log4js logLevelFilter').addBatch({ 'appender': { topic: function() { @@ -76,3 +184,4 @@ vows.describe('log4js logLevelFilter').addBatch({ } } }).export(module); +*/ From 25e983552137cceb9bc781e74fe1211038bd71ab Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 26 Aug 2013 15:39:44 +1000 Subject: [PATCH 33/53] forgot to remove old 
vows tests --- test/logLevelFilter-test.js | 68 ------------------------------------- 1 file changed, 68 deletions(-) diff --git a/test/logLevelFilter-test.js b/test/logLevelFilter-test.js index 7eef8cd7..cbc64abc 100644 --- a/test/logLevelFilter-test.js +++ b/test/logLevelFilter-test.js @@ -117,71 +117,3 @@ describe('log level filter', function() { }).should.throw(/Unrecognised log level 'cheese'\./); }); }); - -/* -vows.describe('log4js logLevelFilter').addBatch({ - 'appender': { - topic: function() { - var log4js = require('../lib/log4js'), logEvents = [], logger; - log4js.clearAppenders(); - log4js.addAppender( - require('../lib/appenders/logLevelFilter') - .appender( - 'ERROR', - function(evt) { logEvents.push(evt); } - ), - "logLevelTest" - ); - - logger = log4js.getLogger("logLevelTest"); - logger.debug('this should not trigger an event'); - logger.warn('neither should this'); - logger.error('this should, though'); - logger.fatal('so should this'); - return logEvents; - }, - 'should only pass log events greater than or equal to its own level' : function(logEvents) { - assert.equal(logEvents.length, 2); - assert.equal(logEvents[0].data[0], 'this should, though'); - assert.equal(logEvents[1].data[0], 'so should this'); - } - }, - - 'configure': { - topic: function() { - var log4js = require('../lib/log4js') - , logger; - - remove(__dirname + '/logLevelFilter.log'); - remove(__dirname + '/logLevelFilter-warnings.log'); - - log4js.configure('test/with-logLevelFilter.json'); - logger = log4js.getLogger("tests"); - logger.info('main'); - logger.error('both'); - logger.warn('both'); - logger.debug('main'); - //wait for the file system to catch up - setTimeout(this.callback, 100); - }, - 'tmp-tests.log': { - topic: function() { - fs.readFile(__dirname + '/logLevelFilter.log', 'utf8', this.callback); - }, - 'should contain all log messages': function(contents) { - var messages = contents.trim().split('\n'); - assert.deepEqual(messages, ['main','both','both','main']); - } - }, - 'tmp-tests-warnings.log': { - topic: function() { - fs.readFile(__dirname + '/logLevelFilter-warnings.log','utf8',this.callback); - }, - 'should contain only error and warning log messages': function(contents) { - var messages = contents.trim().split('\n'); - assert.deepEqual(messages, ['both','both']); - } - } - } -}).export(module); -*/ From 7c0cfbdcfdc91c31cbffe6924d29cee4953aded4 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 26 Aug 2013 16:51:29 +1000 Subject: [PATCH 34/53] converted to mocha tests --- test/date_format-test.js | 73 +++++++++++++++------------------------- 1 file changed, 27 insertions(+), 46 deletions(-) diff --git a/test/date_format-test.js b/test/date_format-test.js index 60858431..de2c5460 100644 --- a/test/date_format-test.js +++ b/test/date_format-test.js @@ -1,51 +1,32 @@ "use strict"; -var vows = require('vows') -, assert = require('assert') +var should = require('should') , dateFormat = require('../lib/date_format'); -vows.describe('date_format').addBatch({ - 'Date extensions': { - topic: function() { - return new Date(2010, 0, 11, 14, 31, 30, 5); - }, - 'should format a date as string using a pattern': function(date) { - assert.equal( - dateFormat.asString(dateFormat.DATETIME_FORMAT, date), - "11 01 2010 14:31:30.005" - ); - }, - 'should default to the ISO8601 format': function(date) { - assert.equal( - dateFormat.asString(date), - '2010-01-11 14:31:30.005' - ); - }, - 'should provide a ISO8601 with timezone offset format': function(date) { - date.getTimezoneOffset = function() { 
return -660; }; - assert.equal( - dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date), - "2010-01-11T14:31:30+1100" - ); +describe('date_format', function() { + var date = new Date(2010, 0, 11, 14, 31, 30, 5); - date.getTimezoneOffset = function() { return 120; }; - assert.equal( - dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date), - "2010-01-11T14:31:30-0200" - ); + it('should format a date as string using a pattern', function() { + dateFormat.asString(dateFormat.DATETIME_FORMAT, date).should.eql("11 01 2010 14:31:30.005"); + }); - }, - 'should provide a just-the-time format': function(date) { - assert.equal( - dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date), - '14:31:30.005' - ); - }, - 'should provide a custom format': function(date) { - date.getTimezoneOffset = function() { return 120; }; - assert.equal( - dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date), - '-0200.005.30.31.14.11.01.10' - ); - } - } -}).export(module); + it('should default to the ISO8601 format', function() { + dateFormat.asString(date).should.eql('2010-01-11 14:31:30.005'); + }); + + it('should provide a ISO8601 with timezone offset format', function() { + date.getTimezoneOffset = function() { return -660; }; + dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date).should.eql("2010-01-11T14:31:30+1100"); + + date.getTimezoneOffset = function() { return 120; }; + dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date).should.eql("2010-01-11T14:31:30-0200"); + }); + + it('should provide a just-the-time format', function() { + dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date).should.eql('14:31:30.005'); + }); + + it('should provide a custom format', function() { + date.getTimezoneOffset = function() { return 120; }; + dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date).should.eql('-0200.005.30.31.14.11.01.10'); + }); +}); From b6dc0b95573e40997a3f78ff9772cd37062683e3 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 26 Aug 2013 21:49:59 +1000 Subject: [PATCH 35/53] migrated layout tests to mocha --- test/layouts-test.js | 418 +++++++++++++++++++++++-------------------- 1 file changed, 226 insertions(+), 192 deletions(-) diff --git a/test/layouts-test.js b/test/layouts-test.js index c355bdd9..d1232b71 100644 --- a/test/layouts-test.js +++ b/test/layouts-test.js @@ -1,23 +1,16 @@ "use strict"; -var vows = require('vows') -, assert = require('assert'); +var assert = require('assert'); //used for patternLayout tests. 
-function test(args, pattern, value) { - var layout = args[0] - , event = args[1] - , tokens = args[2]; - +function test(layout, event, tokens, pattern, value) { assert.equal(layout(pattern, tokens)(event), value); } -vows.describe('log4js layouts').addBatch({ - 'colouredLayout': { - topic: function() { - return require('../lib/layouts').colouredLayout; - }, +describe('log4js layouts', function() { + describe('colouredLayout', function() { + var layout = require('../lib/layouts').colouredLayout; - 'should apply level colour codes to output': function(layout) { + it('should apply level colour codes to output', function() { var output = layout({ data: ["nonsense"], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), @@ -27,8 +20,9 @@ vows.describe('log4js layouts').addBatch({ } }); assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'); - }, - 'should support the console.log format for the message': function(layout) { + }); + + it('should support the console.log format for the message', function() { var output = layout({ data: ["thing %d", 2], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), @@ -38,14 +32,14 @@ vows.describe('log4js layouts').addBatch({ } }); assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2'); - } - }, + }); + + }); - 'messagePassThroughLayout': { - topic: function() { - return require('../lib/layouts').messagePassThroughLayout; - }, - 'should take a logevent and output only the message' : function(layout) { + describe('messagePassThroughLayout', function() { + var layout = require('../lib/layouts').messagePassThroughLayout; + + it('should take a logevent and output only the message', function() { assert.equal(layout({ data: ["nonsense"], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), @@ -55,8 +49,9 @@ vows.describe('log4js layouts').addBatch({ toString: function() { return "ERROR"; } } }), "nonsense"); - }, - 'should support the console.log format for the message' : function(layout) { + }); + + it('should support the console.log format for the message', function() { assert.equal(layout({ data: ["thing %d", 1, "cheese"], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), @@ -66,8 +61,9 @@ vows.describe('log4js layouts').addBatch({ toString: function() { return "ERROR"; } } }), "thing 1 cheese"); - }, - 'should output the first item even if it is not a string': function(layout) { + }); + + it('should output the first item even if it is not a string', function() { assert.equal(layout({ data: [ { thing: 1} ], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), @@ -77,25 +73,36 @@ vows.describe('log4js layouts').addBatch({ toString: function() { return "ERROR"; } } }), "{ thing: 1 }"); - }, - 'should print the stacks of a passed error objects': function(layout) { - assert.isArray(layout({ - data: [ new Error() ], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", - level: { - colour: "green", - toString: function() { return "ERROR"; } - } - }).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/) - , 'regexp did not return a match'); - }, - 'with passed augmented errors': { - topic: function(layout){ + }); + + it('should print the stacks of a passed error objects', function() { + assert.ok( + Array.isArray( + layout({ + data: [ new Error() ], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + categoryName: "cheese", + level: { + colour: "green", + toString: function() { return "ERROR"; } + } + }).match( + /Error\s+at 
Context\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s/ + ) + ), + 'regexp did not return a match' + ); + }); + + describe('with passed augmented errors', function() { + var layoutOutput; + + before(function() { var e = new Error("My Unique Error Message"); e.augmented = "My Unique attribute value"; e.augObj = { at1: "at2" }; - return layout({ + + layoutOutput = layout({ data: [ e ], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), categoryName: "cheese", @@ -104,45 +111,46 @@ vows.describe('log4js layouts').addBatch({ toString: function() { return "ERROR"; } } }); - }, - 'should print error the contained error message': function(layoutOutput) { + }); + + it('should print error the contained error message', function() { var m = layoutOutput.match(/\{ \[Error: My Unique Error Message\]/); - assert.isArray(m); - }, - 'should print error augmented string attributes': function(layoutOutput) { + assert.ok(Array.isArray(m)); + }); + + it('should print error augmented string attributes', function() { var m = layoutOutput.match(/augmented:\s'My Unique attribute value'/); - assert.isArray(m); - }, - 'should print error augmented object attributes': function(layoutOutput) { + assert.ok(Array.isArray(m)); + }); + + it('should print error augmented object attributes', function() { var m = layoutOutput.match(/augObj:\s\{ at1: 'at2' \}/); - assert.isArray(m); - } - } - + assert.ok(Array.isArray(m)); + }); + }); - }, + }); - 'basicLayout': { - topic: function() { - var layout = require('../lib/layouts').basicLayout, - event = { - data: ['this is a test'], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "tests", - level: { - toString: function() { return "DEBUG"; } - } - }; - return [layout, event]; - }, - 'should take a logevent and output a formatted string': function(args) { - var layout = args[0], event = args[1]; + describe('basicLayout', function() { + var layout = require('../lib/layouts').basicLayout + , event = { + data: ['this is a test'], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + categoryName: "tests", + level: { + toString: function() { return "DEBUG"; } + } + }; + + it('should take a logevent and output a formatted string', function() { assert.equal(layout(event), "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test"); - }, - 'should output a stacktrace, message if the event has an error attached': function(args) { - var layout = args[0], event = args[1], output, lines, - error = new Error("Some made-up error"), - stack = error.stack.split(/\n/); + }); + + it('should output a stacktrace, message if the event has an error attached', function() { + var output + , lines + , error = new Error("Some made-up error") + , stack = error.stack.split(/\n/); event.data = ['this is a test', error]; output = layout(event); @@ -157,140 +165,166 @@ vows.describe('log4js layouts').addBatch({ for (var i = 1; i < stack.length; i++) { assert.equal(lines[i+2], stack[i+1]); } - }, - 'should output any extra data in the log event as util.inspect strings': function(args) { - var layout = args[0], event = args[1], output, lines; + }); + + it('should output any extra data in the log event as util.inspect strings', function() { + var output, lines; + event.data = ['this is a test', { name: 'Cheese', message: 'Gorgonzola smells.' }]; output = layout(event); + assert.equal( output, "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test " + "{ name: 'Cheese', message: 'Gorgonzola smells.' 
}" ); - } - }, + }); + }); - 'patternLayout': { - topic: function() { - var event = { - data: ['this is a test'], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "multiple.levels.of.tests", - level: { - toString: function() { return "DEBUG"; } - } - }, layout = require('../lib/layouts').patternLayout - , tokens = { - testString: 'testStringToken', - testFunction: function() { return 'testFunctionToken'; }, - fnThatUsesLogEvent: function(logEvent) { return logEvent.level.toString(); } - }; - return [layout, event, tokens]; - }, + describe('patternLayout', function() { + var event = { + data: ['this is a test'], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + categoryName: "multiple.levels.of.tests", + level: { + toString: function() { return "DEBUG"; } + } + } + , layout = require('../lib/layouts').patternLayout + , tokens = { + testString: 'testStringToken', + testFunction: function() { return 'testFunctionToken'; }, + fnThatUsesLogEvent: function(logEvent) { return logEvent.level.toString(); } + }; + + event.startTime.getTimezoneOffset = function() { return 0; }; - 'should default to "time logLevel loggerName - message"': function(args) { - test(args, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n"); - }, - '%r should output time only': function(args) { - test(args, '%r', '14:18:30'); - }, - '%p should output the log level': function(args) { - test(args, '%p', 'DEBUG'); - }, - '%c should output the log category': function(args) { - test(args, '%c', 'multiple.levels.of.tests'); - }, - '%m should output the log data': function(args) { - test(args, '%m', 'this is a test'); - }, - '%n should output a new line': function(args) { - test(args, '%n', '\n'); - }, - '%h should output hostname' : function(args) { - test(args, '%h', require('os').hostname().toString()); - }, - '%c should handle category names like java-style package names': function(args) { - test(args, '%c{1}', 'tests'); - test(args, '%c{2}', 'of.tests'); - test(args, '%c{3}', 'levels.of.tests'); - test(args, '%c{4}', 'multiple.levels.of.tests'); - test(args, '%c{5}', 'multiple.levels.of.tests'); - test(args, '%c{99}', 'multiple.levels.of.tests'); - }, - '%d should output the date in ISO8601 format': function(args) { - test(args, '%d', '2010-12-05 14:18:30.045'); - }, - '%d should allow for format specification': function(args) { - test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30-0000'); - test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045'); - test(args, '%d{ABSOLUTE}', '14:18:30.045'); - test(args, '%d{DATE}', '05 12 2010 14:18:30.045'); - test(args, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30'); - test(args, '%d{yyyy MM dd}', '2010 12 05'); - test(args, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045'); - }, - '%% should output %': function(args) { - test(args, '%%', '%'); - }, - 'should output anything not preceded by % as literal': function(args) { - test(args, 'blah blah blah', 'blah blah blah'); - }, - 'should output the original string if no replacer matches the token': function(args) { - test(args, '%a{3}', 'a{3}'); - }, - 'should handle complicated patterns': function(args) { - test(args, + it('should default to "time logLevel loggerName - message"', function() { + test(layout, event, tokens, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n"); + }); + + it('%r should output time only', function() { + test(layout, event, tokens, '%r', '14:18:30'); + }); + + it('%p should output the log level', function() { + test(layout, event, tokens, '%p', 
'DEBUG'); + }); + + it('%c should output the log category', function() { + test(layout, event, tokens, '%c', 'multiple.levels.of.tests'); + }); + + it('%m should output the log data', function() { + test(layout, event, tokens, '%m', 'this is a test'); + }); + + it('%n should output a new line', function() { + test(layout, event, tokens, '%n', '\n'); + }); + + it('%h should output hostname', function() { + test(layout, event, tokens, '%h', require('os').hostname().toString()); + }); + + it('%c should handle category names like java-style package names', function() { + test(layout, event, tokens, '%c{1}', 'tests'); + test(layout, event, tokens, '%c{2}', 'of.tests'); + test(layout, event, tokens, '%c{3}', 'levels.of.tests'); + test(layout, event, tokens, '%c{4}', 'multiple.levels.of.tests'); + test(layout, event, tokens, '%c{5}', 'multiple.levels.of.tests'); + test(layout, event, tokens, '%c{99}', 'multiple.levels.of.tests'); + }); + + it('%d should output the date in ISO8601 format', function() { + test(layout, event, tokens, '%d', '2010-12-05 14:18:30.045'); + }); + + it('%d should allow for format specification', function() { + test(layout, event, tokens, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30-0000'); + test(layout, event, tokens, '%d{ISO8601}', '2010-12-05 14:18:30.045'); + test(layout, event, tokens, '%d{ABSOLUTE}', '14:18:30.045'); + test(layout, event, tokens, '%d{DATE}', '05 12 2010 14:18:30.045'); + test(layout, event, tokens, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30'); + test(layout, event, tokens, '%d{yyyy MM dd}', '2010 12 05'); + test(layout, event, tokens, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045'); + }); + + it('%% should output %', function() { + test(layout, event, tokens, '%%', '%'); + }); + + it('should output anything not preceded by % as literal', function() { + test(layout, event, tokens, 'blah blah blah', 'blah blah blah'); + }); + + it('should output the original string if no replacer matches the token', function() { + test(layout, event, tokens, '%a{3}', 'a{3}'); + }); + + it('should handle complicated patterns', function() { + test(layout, event, tokens, '%m%n %c{2} at %d{ABSOLUTE} cheese %p%n', 'this is a test\n of.tests at 14:18:30.045 cheese DEBUG\n' ); - }, - 'should truncate fields if specified': function(args) { - test(args, '%.4m', 'this'); - test(args, '%.7m', 'this is'); - test(args, '%.9m', 'this is a'); - test(args, '%.14m', 'this is a test'); - test(args, '%.2919102m', 'this is a test'); - }, - 'should pad fields if specified': function(args) { - test(args, '%10p', ' DEBUG'); - test(args, '%8p', ' DEBUG'); - test(args, '%6p', ' DEBUG'); - test(args, '%4p', 'DEBUG'); - test(args, '%-4p', 'DEBUG'); - test(args, '%-6p', 'DEBUG '); - test(args, '%-8p', 'DEBUG '); - test(args, '%-10p', 'DEBUG '); - }, - '%[%r%] should output colored time': function(args) { - test(args, '%[%r%]', '\x1B[36m14:18:30\x1B[39m'); - }, - '%x{testString} should output the string stored in tokens': function(args) { - test(args, '%x{testString}', 'testStringToken'); - }, - '%x{testFunction} should output the result of the function stored in tokens': function(args) { - test(args, '%x{testFunction}', 'testFunctionToken'); - }, - '%x{doesNotExist} should output the string stored in tokens': function(args) { - test(args, '%x{doesNotExist}', '%x{doesNotExist}'); - }, - '%x{fnThatUsesLogEvent} should be able to use the logEvent': function(args) { - test(args, '%x{fnThatUsesLogEvent}', 'DEBUG'); - }, - '%x should output the string stored in tokens': function(args) 
{ - test(args, '%x', '%x'); - }, - }, - 'layout makers': { - topic: require('../lib/layouts'), - 'should have a maker for each layout': function(layouts) { + }); + + it('should truncate fields if specified', function() { + test(layout, event, tokens, '%.4m', 'this'); + test(layout, event, tokens, '%.7m', 'this is'); + test(layout, event, tokens, '%.9m', 'this is a'); + test(layout, event, tokens, '%.14m', 'this is a test'); + test(layout, event, tokens, '%.2919102m', 'this is a test'); + }); + + it('should pad fields if specified', function() { + test(layout, event, tokens, '%10p', ' DEBUG'); + test(layout, event, tokens, '%8p', ' DEBUG'); + test(layout, event, tokens, '%6p', ' DEBUG'); + test(layout, event, tokens, '%4p', 'DEBUG'); + test(layout, event, tokens, '%-4p', 'DEBUG'); + test(layout, event, tokens, '%-6p', 'DEBUG '); + test(layout, event, tokens, '%-8p', 'DEBUG '); + test(layout, event, tokens, '%-10p', 'DEBUG '); + }); + + it('%[%r%] should output colored time', function() { + test(layout, event, tokens, '%[%r%]', '\x1B[36m14:18:30\x1B[39m'); + }); + + it('%x{testString} should output the string stored in tokens', function() { + test(layout, event, tokens, '%x{testString}', 'testStringToken'); + }); + + it('%x{testFunction} should output the result of the function stored in tokens', function() { + test(layout, event, tokens, '%x{testFunction}', 'testFunctionToken'); + }); + + it('%x{doesNotExist} should output the string stored in tokens', function() { + test(layout, event, tokens, '%x{doesNotExist}', '%x{doesNotExist}'); + }); + + it('%x{fnThatUsesLogEvent} should be able to use the logEvent', function() { + test(layout, event, tokens, '%x{fnThatUsesLogEvent}', 'DEBUG'); + }); + + it('%x should output the string stored in tokens', function() { + test(layout, event, tokens, '%x', '%x'); + }); + }); + + describe('layout makers', function() { + var layouts = require('../lib/layouts'); + + it('should have a maker for each layout', function() { assert.ok(layouts.layout("messagePassThrough")); assert.ok(layouts.layout("basic")); assert.ok(layouts.layout("colored")); assert.ok(layouts.layout("coloured")); assert.ok(layouts.layout("pattern")); - } - } -}).export(module); + }); + }); +}); From 045b0dda2b9930cd6fda524f89dd61d99336c713 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 26 Aug 2013 22:48:50 +1000 Subject: [PATCH 36/53] renamed categoryName -> category --- lib/layouts.js | 10 +++++----- test/layouts-test.js | 18 +++++++++--------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/lib/layouts.js b/lib/layouts.js index 9cfd0353..9043448f 100644 --- a/lib/layouts.js +++ b/lib/layouts.js @@ -77,7 +77,7 @@ function timestampLevelAndCategory(loggingEvent, colour) { '[%s] [%s] %s - ' , dateFormat.asString(loggingEvent.startTime) , loggingEvent.level - , loggingEvent.categoryName + , loggingEvent.category ) , colour ); @@ -88,7 +88,7 @@ function timestampLevelAndCategory(loggingEvent, colour) { * BasicLayout is a simple layout for storing the logs. The logs are stored * in following format: *
- * [startTime] [logLevel] categoryName - message\n
+ * [startTime] [logLevel] category - message\n
  * 
* * @author Stephan Strittmatter @@ -148,8 +148,8 @@ function patternLayout (pattern, tokens) { pattern = pattern || TTCC_CONVERSION_PATTERN; - function categoryName(loggingEvent, specifier) { - var loggerName = loggingEvent.categoryName; + function category(loggingEvent, specifier) { + var loggerName = loggingEvent.category; if (specifier) { var precision = parseInt(specifier, 10); var loggerNameBits = loggerName.split("."); @@ -223,7 +223,7 @@ function patternLayout (pattern, tokens) { } var replacers = { - 'c': categoryName, + 'c': category, 'd': formatAsDate, 'h': hostname, 'm': formatMessage, diff --git a/test/layouts-test.js b/test/layouts-test.js index d1232b71..26bce675 100644 --- a/test/layouts-test.js +++ b/test/layouts-test.js @@ -14,7 +14,7 @@ describe('log4js layouts', function() { var output = layout({ data: ["nonsense"], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", + category: "cheese", level: { toString: function() { return "ERROR"; } } @@ -26,7 +26,7 @@ describe('log4js layouts', function() { var output = layout({ data: ["thing %d", 2], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", + category: "cheese", level: { toString: function() { return "ERROR"; } } @@ -43,7 +43,7 @@ describe('log4js layouts', function() { assert.equal(layout({ data: ["nonsense"], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", + category: "cheese", level: { colour: "green", toString: function() { return "ERROR"; } @@ -55,7 +55,7 @@ describe('log4js layouts', function() { assert.equal(layout({ data: ["thing %d", 1, "cheese"], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", + category: "cheese", level : { colour: "green", toString: function() { return "ERROR"; } @@ -67,7 +67,7 @@ describe('log4js layouts', function() { assert.equal(layout({ data: [ { thing: 1} ], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", + category: "cheese", level: { colour: "green", toString: function() { return "ERROR"; } @@ -81,7 +81,7 @@ describe('log4js layouts', function() { layout({ data: [ new Error() ], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", + category: "cheese", level: { colour: "green", toString: function() { return "ERROR"; } @@ -105,7 +105,7 @@ describe('log4js layouts', function() { layoutOutput = layout({ data: [ e ], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", + category: "cheese", level: { colour: "green", toString: function() { return "ERROR"; } @@ -136,7 +136,7 @@ describe('log4js layouts', function() { , event = { data: ['this is a test'], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "tests", + category: "tests", level: { toString: function() { return "DEBUG"; } } @@ -188,7 +188,7 @@ describe('log4js layouts', function() { var event = { data: ['this is a test'], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "multiple.levels.of.tests", + category: "multiple.levels.of.tests", level: { toString: function() { return "DEBUG"; } } From 3312724d7da4daaffb353b6f2bea90f6f90254c0 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 26 Aug 2013 22:49:12 +1000 Subject: [PATCH 37/53] migrated file appender tests to mocha --- lib/appenders/file.js | 6 +- test/fileAppender-test.js | 376 ++++++++++++++++++++------------------ 2 files changed, 202 insertions(+), 180 deletions(-) diff --git a/lib/appenders/file.js b/lib/appenders/file.js index da0e80b3..c8566162 100644 --- 
a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -65,16 +65,12 @@ function fileAppender (file, layout, logSize, numBackups) { }; } -function configure(config, options) { +function configure(config) { var layout; if (config.layout) { layout = layouts.layout(config.layout.type, config.layout); } - if (options && options.cwd && !config.absolute) { - config.filename = path.join(options.cwd, config.filename); - } - return fileAppender(config.filename, layout, config.maxLogSize, config.backups); } diff --git a/test/fileAppender-test.js b/test/fileAppender-test.js index 9476ad65..11fe4f17 100644 --- a/test/fileAppender-test.js +++ b/test/fileAppender-test.js @@ -1,12 +1,9 @@ "use strict"; -var vows = require('vows') -, fs = require('fs') +var fs = require('fs') , path = require('path') , sandbox = require('sandboxed-module') , log4js = require('../lib/log4js') -, assert = require('assert'); - -log4js.clearAppenders(); +, should = require('should'); function remove(filename) { try { @@ -16,30 +13,38 @@ function remove(filename) { } } -vows.describe('log4js fileAppender').addBatch({ - 'adding multiple fileAppenders': { - topic: function () { - var listenersCount = process.listeners('exit').length - , logger = log4js.getLogger('default-settings') - , count = 5, logfile; - +describe('log4js fileAppender', function() { + + describe('adding multiple fileAppenders', function() { + var initialCount, listenersCount; + + before(function() { + var logfile + , count = 5 + , config = { appenders: {}, categories: { default: { level: "debug", appenders: ["file0"] } } }; + + initialCount = process.listeners('exit').length + while (count--) { logfile = path.join(__dirname, '/fa-default-test' + count + '.log'); - log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings'); + config.appenders["file" + count] = { type: "file", filename: logfile }; } - - return listenersCount; - }, + + log4js.configure(config); + + listenersCount = process.listeners('exit').length; + }); - 'does not add more than one `exit` listeners': function (initialCount) { - assert.ok(process.listeners('exit').length <= initialCount + 1); - } - }, + it('does not add more than one `exit` listeners', function () { + listenersCount.should.be.below(initialCount + 2); + }); + }); - 'exit listener': { - topic: function() { + describe('exit listener', function() { + var openedFiles = []; + + before(function() { var exitListener - , openedFiles = [] , fileAppender = sandbox.require( '../lib/appenders/file', { @@ -68,178 +73,198 @@ vows.describe('log4js fileAppender').addBatch({ for (var i=0; i < 5; i += 1) { fileAppender.appender('test' + i, null, 100); } - assert.isNotEmpty(openedFiles); + openedFiles.should.not.be.empty; exitListener(); - return openedFiles; - }, - 'should close all open files': function(openedFiles) { - assert.isEmpty(openedFiles); - } - }, - - 'with default fileAppender settings': { - topic: function() { + }); + + it('should close all open files', function() { + openedFiles.should.be.empty; + }); + }); + + describe('with default fileAppender settings', function() { + var fileContents; + + before(function(done) { var that = this , testFile = path.join(__dirname, '/fa-default-test.log') , logger = log4js.getLogger('default-settings'); + remove(testFile); - log4js.clearAppenders(); - log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings'); + log4js.configure({ + appenders: { + "file": { type: "file", filename: testFile } + }, + categories: { + default: { level: 
"debug", appenders: [ "file" ] } + } + }); logger.info("This should be in the file."); setTimeout(function() { - fs.readFile(testFile, "utf8", that.callback); + fs.readFile(testFile, "utf8", function(err, contents) { + if (!err) { + fileContents = contents; + } + done(err); + }); }, 100); - }, - 'should write log messages to the file': function(err, fileContents) { - assert.include(fileContents, "This should be in the file.\n"); - }, - 'log messages should be in the basic layout format': function(err, fileContents) { - assert.match( - fileContents, + }); + + it('should write log messages to the file', function() { + fileContents.should.include("This should be in the file.\n"); + }); + + it('log messages should be in the basic layout format', function() { + fileContents.should.match( /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - / ); - } - }, - 'with a max file size and no backups': { - topic: function() { - var testFile = path.join(__dirname, '/fa-maxFileSize-test.log') - , logger = log4js.getLogger('max-file-size') - , that = this; + }); + }); + + describe('with a max file size and no backups', function() { + var testFile = path.join(__dirname, '/fa-maxFileSize-test.log'); + + before(function() { + var logger = log4js.getLogger('max-file-size'); + remove(testFile); remove(testFile + '.1'); + //log file of 100 bytes maximum, no backups - log4js.clearAppenders(); - log4js.addAppender( - require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0), - 'max-file-size' - ); + log4js.configure({ + appenders: { + "file": { type: "file", filename: testFile, maxLogSize: 100, backups: 0 } + }, + categories: { + default: { level: "debug", appenders: [ "file" ] } + } + }); logger.info("This is the first log message."); logger.info("This is an intermediate log message."); logger.info("This is the second log message."); - //wait for the file system to catch up - setTimeout(function() { - fs.readFile(testFile, "utf8", that.callback); - }, 100); - }, - 'log file should only contain the second message': function(err, fileContents) { - assert.include(fileContents, "This is the second log message.\n"); - assert.equal(fileContents.indexOf("This is the first log message."), -1); - }, - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'starting with the test file name should be two': function(err, files) { - //there will always be one backup if you've specified a max log size - var logFiles = files.filter( - function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; } - ); - assert.equal(logFiles.length, 2); - } - } - }, - 'with a max file size and 2 backups': { - topic: function() { - var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log') - , logger = log4js.getLogger('max-file-size-backups'); + }); + + describe('log file', function() { + it('should only contain the second message', function(done) { + //wait for the file system to catch up + setTimeout(function() { + fs.readFile(testFile, "utf8", function(err, fileContents) { + fileContents.should.include("This is the second log message.\n"); + fileContents.should.not.include("This is the first log message."); + done(err); + }); + }, 100); + }); + }); + + describe('the number of files starting with the test file name', function() { + it('should be two', function(done) { + fs.readdir(__dirname, function(err, files) { + //there will always be one backup if you've specified a max log size + var logFiles = files.filter( + 
function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; } + ); + logFiles.should.have.length(2); + done(err); + }); + }); + }); + }); + + describe('with a max file size and 2 backups', function() { + var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log'); + + before(function() { + var logger = log4js.getLogger('max-file-size-backups'); remove(testFile); remove(testFile+'.1'); remove(testFile+'.2'); //log file of 50 bytes maximum, 2 backups - log4js.clearAppenders(); - log4js.addAppender( - require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2), - 'max-file-size-backups' - ); + log4js.configure({ + appenders: { + "file": { type: "file", filename: testFile, maxLogSize: 50, backups: 2 } + }, + categories: { + default: { level: "debug", appenders: [ "file" ] } + } + }); + logger.info("This is the first log message."); logger.info("This is the second log message."); logger.info("This is the third log message."); logger.info("This is the fourth log message."); - var that = this; - //give the system a chance to open the stream - setTimeout(function() { - fs.readdir(__dirname, function(err, files) { - if (files) { - that.callback(null, files.sort()); - } else { - that.callback(err, files); - } - }); - }, 200); - }, - 'the log files': { - topic: function(files) { - var logFiles = files.filter( - function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; } - ); - return logFiles; - }, - 'should be 3': function (files) { - assert.equal(files.length, 3); - }, - 'should be named in sequence': function (files) { - assert.deepEqual(files, [ + }); + + describe('the log files', function() { + var logFiles; + + before(function(done) { + setTimeout(function() { + fs.readdir(__dirname, function(err, files) { + if (files) { + logFiles = files.sort().filter( + function(file) { + return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; + } + ); + done(null); + } else { + done(err); + } + }); + }, 200); + }); + + it('should be 3', function () { + logFiles.should.have.length(3); + }); + + it('should be named in sequence', function() { + logFiles.should.eql([ 'fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2' ]); - }, - 'and the contents of the first file': { - topic: function(logFiles) { - fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback); - }, - 'should be the last log message': function(contents) { - assert.include(contents, 'This is the fourth log message.'); - } - }, - 'and the contents of the second file': { - topic: function(logFiles) { - fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback); - }, - 'should be the third log message': function(contents) { - assert.include(contents, 'This is the third log message.'); - } - }, - 'and the contents of the third file': { - topic: function(logFiles) { - fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback); - }, - 'should be the second log message': function(contents) { - assert.include(contents, 'This is the second log message.'); - } - } - } - } -}).addBatch({ - 'configure' : { - 'with fileAppender': { - topic: function() { - var log4js = require('../lib/log4js') - , logger; - //this config file defines one file appender (to ./tmp-tests.log) - //and sets the log level for "tests" to WARN - log4js.configure('./test/log4js.json'); - logger = log4js.getLogger('tests'); - logger.info('this should not be written to the file'); - logger.warn('this 
should be written to the file'); - - fs.readFile('tmp-tests.log', 'utf8', this.callback); - }, - 'should load appender configuration from a json file': function(err, contents) { - assert.include(contents, 'this should be written to the file\n'); - assert.equal(contents.indexOf('this should not be written to the file'), -1); - } - } - } -}).addBatch({ - 'when underlying stream errors': { - topic: function() { - var consoleArgs - , errorHandler + }); + + describe('and the contents of the first file', function() { + it('should be the last log message', function(done) { + fs.readFile(path.join(__dirname, logFiles[0]), "utf8", function(err, contents) { + contents.should.include('This is the fourth log message.'); + done(err); + }); + }); + }); + + describe('and the contents of the second file', function() { + it('should be the third log message', function(done) { + fs.readFile(path.join(__dirname, logFiles[1]), "utf8", function(err, contents) { + contents.should.include('This is the third log message.'); + done(err); + }); + }); + }); + + describe('and the contents of the third file', function() { + it('should be the second log message', function(done) { + fs.readFile(path.join(__dirname, logFiles[2]), "utf8", function(err, contents) { + contents.should.include('This is the second log message.'); + done(err); + }); + }); + }); + }); + }); + + describe('when underlying stream errors', function() { + var consoleArgs; + + before(function() { + var errorHandler , fileAppender = sandbox.require( '../lib/appenders/file', { @@ -265,16 +290,17 @@ vows.describe('log4js fileAppender').addBatch({ } } ); - fileAppender.appender('test1.log', null, 100); + fileAppender.configure({ + filename: 'test1.log', maxLogSize: 100 + }); errorHandler({ error: 'aargh' }); - return consoleArgs; - }, - 'should log the error to console.error': function(consoleArgs) { - assert.isNotEmpty(consoleArgs); - assert.equal(consoleArgs[0], 'log4js.fileAppender - Writing to file %s, error happened '); - assert.equal(consoleArgs[1], 'test1.log'); - assert.equal(consoleArgs[2].error, 'aargh'); - } - } - -}).export(module); + }); + + it('should log the error to console.error', function() { + consoleArgs.should.not.be.empty; + consoleArgs[0].should.eql('log4js.fileAppender - Writing to file %s, error happened '); + consoleArgs[1].should.eql('test1.log'); + consoleArgs[2].error.should.eql('aargh'); + }); + }); +}); From d43d49d83dbcbd66f6e5c940f7d28baad82e8b45 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Tue, 27 Aug 2013 13:44:33 +1000 Subject: [PATCH 38/53] converted date file appender tests to mocha --- lib/log4js.js | 4 +- test/dateFileAppender-test.js | 358 +++++++++++++++++----------------- test/fileAppender-test.js | 138 +++++++------ test/with-dateFile.json | 11 +- 4 files changed, 269 insertions(+), 242 deletions(-) diff --git a/lib/log4js.js b/lib/log4js.js index 109fe3c2..3a112f89 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -136,7 +136,9 @@ function dispatch(event) { function load(file) { debug("loading ", file); - return JSON.parse(fs.readFileSync(file, "utf-8")); + var contents = fs.readFileSync(file, "utf-8"); + debug("file contents ", contents); + return JSON.parse(contents); } function configure(configurationFileOrObject) { diff --git a/test/dateFileAppender-test.js b/test/dateFileAppender-test.js index 59355e21..883b2f9e 100644 --- a/test/dateFileAppender-test.js +++ b/test/dateFileAppender-test.js @@ -1,153 +1,173 @@ "use strict"; -var vows = require('vows') -, assert = require('assert') +var should = 
require('should') +, async = require('async') , path = require('path') , fs = require('fs') -, sandbox = require('sandboxed-module') -, log4js = require('../lib/log4js'); - -function removeFile(filename) { - return function() { - fs.unlink(path.join(__dirname, filename), function(err) { - if (err) { - console.log("Could not delete ", filename, err); +, sandbox = require('sandboxed-module'); + +function remove(filename, cb) { + fs.unlink(path.join(__dirname, filename), function(err) { + cb(); + }); +} + +describe('../lib/appenders/dateFile', function() { + describe('adding multiple dateFileAppenders', function() { + var files = [], initialListeners; + + before(function() { + var dateFileAppender = require('../lib/appenders/dateFile'), + count = 5, + logfile; + + initialListeners = process.listeners('exit').length; + + while (count--) { + logfile = path.join(__dirname, 'datefa-default-test' + count + '.log'); + dateFileAppender.configure({ + filename: logfile + }); + files.push(logfile); } }); - }; -} -vows.describe('../lib/appenders/dateFile').addBatch({ - 'appender': { - 'adding multiple dateFileAppenders': { - topic: function () { - var listenersCount = process.listeners('exit').length, - dateFileAppender = require('../lib/appenders/dateFile'), - count = 5, - logfile; - - while (count--) { - logfile = path.join(__dirname, 'datefa-default-test' + count + '.log'); - log4js.addAppender(dateFileAppender.appender(logfile)); - } - - return listenersCount; - }, - teardown: function() { - removeFile('datefa-default-test0.log')(); - removeFile('datefa-default-test1.log')(); - removeFile('datefa-default-test2.log')(); - removeFile('datefa-default-test3.log')(); - removeFile('datefa-default-test4.log')(); - }, + after(function(done) { + async.forEach(files, remove, done); + }); - 'should only add one `exit` listener': function (initialCount) { - assert.equal(process.listeners('exit').length, initialCount + 1); - }, - - }, - - 'exit listener': { - topic: function() { - var exitListener - , openedFiles = [] - , dateFileAppender = sandbox.require( - '../lib/appenders/dateFile', - { - globals: { - process: { - on: function(evt, listener) { - exitListener = listener; - } + it('should only add one `exit` listener', function () { + process.listeners('exit').length.should.be.below(initialListeners + 2); + }); + + }); + + describe('exit listener', function() { + var openedFiles = []; + + before(function() { + var exitListener + , dateFileAppender = sandbox.require( + '../lib/appenders/dateFile', + { + globals: { + process: { + on: function(evt, listener) { + exitListener = listener; } - }, - requires: { - '../streams': { - DateRollingFileStream: function(filename) { - openedFiles.push(filename); - - this.end = function() { - openedFiles.shift(); - }; - } + } + }, + requires: { + '../streams': { + DateRollingFileStream: function(filename) { + openedFiles.push(filename); + + this.end = function() { + openedFiles.shift(); + }; } - } - } - ); - for (var i=0; i < 5; i += 1) { - dateFileAppender.appender('test' + i); + } + } } - assert.isNotEmpty(openedFiles); - exitListener(); - return openedFiles; - }, - 'should close all open files': function(openedFiles) { - assert.isEmpty(openedFiles); + ); + + for (var i=0; i < 5; i += 1) { + dateFileAppender.configure({ + filename: 'test' + i + }); } - }, - - 'with default settings': { - topic: function() { - var that = this, - testFile = path.join(__dirname, 'date-appender-default.log'), - appender = require('../lib/appenders/dateFile').appender(testFile), - logger = 
log4js.getLogger('default-settings'); - log4js.clearAppenders(); - log4js.addAppender(appender, 'default-settings'); + + openedFiles.should.not.be.empty; + exitListener(); + }); + + it('should close all open files', function() { + openedFiles.should.be.empty; + }); + }); + + describe('with default settings', function() { + var contents; + + before(function(done) { + var testFile = path.join(__dirname, 'date-appender-default.log'), + log4js = require('../lib/log4js'), + logger = log4js.getLogger('default-settings'); + + log4js.configure({ + appenders: { + "date": { type: "dateFile", filename: testFile } + }, + categories: { + default: { level: "debug", appenders: [ "date" ] } + } + }); - logger.info("This should be in the file."); + logger.info("This should be in the file."); - setTimeout(function() { - fs.readFile(testFile, "utf8", that.callback); - }, 100); + setTimeout(function() { + fs.readFile(testFile, "utf8", function(err, data) { + contents = data; + done(err); + }); + }, 100); - }, - teardown: removeFile('date-appender-default.log'), + }); + + after(function(done) { + remove('date-appender-default.log', done); + }); - 'should write to the file': function(contents) { - assert.include(contents, 'This should be in the file'); - }, + it('should write to the file', function() { + contents.should.include('This should be in the file'); + }); - 'should use the basic layout': function(contents) { - assert.match( - contents, - /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - / - ); - } - } - - } -}).addBatch({ - 'configure': { - 'with dateFileAppender': { - topic: function() { + it('should use the basic layout', function() { + contents.should.match( + /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - / + ); + }); + }); + + describe('configure', function() { + describe('with dateFileAppender', function() { + var contents; + + before(function(done) { var log4js = require('../lib/log4js') - , logger; + , logger = log4js.getLogger('tests'); + //this config file defines one file appender (to ./date-file-test.log) //and sets the log level for "tests" to WARN log4js.configure('test/with-dateFile.json'); - logger = log4js.getLogger('tests'); logger.info('this should not be written to the file'); logger.warn('this should be written to the file'); - fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback); - }, - teardown: removeFile('date-file-test.log'), + fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', function(err, data) { + contents = data; + done(err); + }); + }); + + after(function(done) { + remove('date-file-test.log', done); + }); - 'should load appender configuration from a json file': function(err, contents) { - assert.include(contents, 'this should be written to the file' + require('os').EOL); - assert.equal(contents.indexOf('this should not be written to the file'), -1); - } - }, - 'with options.alwaysIncludePattern': { - topic: function() { - var self = this - , log4js = require('../lib/log4js') + it('should load appender configuration from a json file', function() { + contents.should.include('this should be written to the file' + require('os').EOL); + contents.should.not.include('this should not be written to the file'); + }); + }); + + describe('with options.alwaysIncludePattern', function() { + var contents, thisTime; + + before(function(done) { + var log4js = require('../lib/log4js') , format = require('../lib/date_format') , logger , options = { - "appenders": [ - { - "category": "tests", + 
"appenders": { + "datefile": { "type": "dateFile", "filename": "test/date-file-test", "pattern": "-from-MM-dd.log", @@ -156,63 +176,45 @@ vows.describe('../lib/appenders/dateFile').addBatch({ "type": "messagePassThrough" } } - ] - } - , thisTime = format.asString(options.appenders[0].pattern, new Date()); - fs.writeFileSync( + }, + categories: { default: { level: "debug", appenders: [ "datefile" ] } } + }; + thisTime = format.asString(options.appenders.datefile.pattern, new Date()); + + fs.writeFile( path.join(__dirname, 'date-file-test' + thisTime), "this is existing data" + require('os').EOL, - 'utf8' - ); - log4js.clearAppenders(); - log4js.configure(options); - logger = log4js.getLogger('tests'); - logger.warn('this should be written to the file with the appended date'); - this.teardown = removeFile('date-file-test' + thisTime); - //wait for filesystem to catch up - setTimeout(function() { - fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', self.callback); - }, 100); - }, - 'should create file with the correct pattern': function(contents) { - assert.include(contents, 'this should be written to the file with the appended date'); - }, - 'should not overwrite the file on open (bug found in issue #132)': function(contents) { - assert.include(contents, 'this is existing data'); - } - }, - 'with cwd option': { - topic: function() { - var fileOpened, - appender = sandbox.require( - '../lib/appenders/dateFile', - { requires: - { '../streams': - { DateRollingFileStream: - function(file) { - fileOpened = file; - return { - on: function() {}, - end: function() {} - }; + 'utf8', + function(err) { + log4js.configure(options); + logger = log4js.getLogger('tests'); + logger.warn('this should be written to the file with the appended date'); + //wait for filesystem to catch up + setTimeout(function() { + fs.readFile( + path.join(__dirname, 'date-file-test' + thisTime), + 'utf8', + function(err, data) { + contents = data; + done(err); } - } - } + ); + }, 100); } ); - appender.configure( - { - filename: "whatever.log", - maxLogSize: 10 - }, - { cwd: '/absolute/path/to' } - ); - return fileOpened; - }, - 'should prepend options.cwd to config.filename': function(fileOpened) { - assert.equal(fileOpened, "/absolute/path/to/whatever.log"); - } - } - - } -}).exportTo(module); + }); + + after(function(done) { + remove('date-file-test' + thisTime, done); + }); + + it('should create file with the correct pattern', function() { + contents.should.include('this should be written to the file with the appended date'); + }); + + it('should not overwrite the file on open (bug found in issue #132)', function() { + contents.should.include('this is existing data'); + }); + }); + }); +}); diff --git a/test/fileAppender-test.js b/test/fileAppender-test.js index 11fe4f17..5e889ac2 100644 --- a/test/fileAppender-test.js +++ b/test/fileAppender-test.js @@ -1,22 +1,19 @@ "use strict"; var fs = require('fs') +, async = require('async') , path = require('path') , sandbox = require('sandboxed-module') , log4js = require('../lib/log4js') , should = require('should'); -function remove(filename) { - try { - fs.unlinkSync(filename); - } catch (e) { - //doesn't really matter if it failed - } +function remove(filename, cb) { + fs.unlink(filename, function(err) { cb(); }); } describe('log4js fileAppender', function() { describe('adding multiple fileAppenders', function() { - var initialCount, listenersCount; + var files = [], initialCount, listenersCount; before(function() { var logfile @@ -28,12 +25,17 @@ 
describe('log4js fileAppender', function() { while (count--) { logfile = path.join(__dirname, '/fa-default-test' + count + '.log'); config.appenders["file" + count] = { type: "file", filename: logfile }; + files.push(logfile); } log4js.configure(config); listenersCount = process.listeners('exit').length; }); + + after(function(done) { + async.forEach(files, remove, done); + }); it('does not add more than one `exit` listeners', function () { listenersCount.should.be.below(initialCount + 2); @@ -83,34 +85,38 @@ describe('log4js fileAppender', function() { }); describe('with default fileAppender settings', function() { - var fileContents; + var fileContents + , testFile = path.join(__dirname, '/fa-default-test.log'); before(function(done) { - var that = this - , testFile = path.join(__dirname, '/fa-default-test.log') - , logger = log4js.getLogger('default-settings'); + var logger = log4js.getLogger('default-settings'); - remove(testFile); + remove(testFile, function() { - log4js.configure({ - appenders: { - "file": { type: "file", filename: testFile } - }, - categories: { + log4js.configure({ + appenders: { + "file": { type: "file", filename: testFile } + }, + categories: { default: { level: "debug", appenders: [ "file" ] } - } - }); - - logger.info("This should be in the file."); - - setTimeout(function() { - fs.readFile(testFile, "utf8", function(err, contents) { - if (!err) { - fileContents = contents; } - done(err); }); - }, 100); + + logger.info("This should be in the file."); + + setTimeout(function() { + fs.readFile(testFile, "utf8", function(err, contents) { + if (!err) { + fileContents = contents; + } + done(err); + }); + }, 100); + }); + }); + + after(function(done) { + remove(testFile, done); }); it('should write log messages to the file', function() { @@ -127,24 +133,32 @@ describe('log4js fileAppender', function() { describe('with a max file size and no backups', function() { var testFile = path.join(__dirname, '/fa-maxFileSize-test.log'); - before(function() { + before(function(done) { var logger = log4js.getLogger('max-file-size'); - remove(testFile); - remove(testFile + '.1'); + async.forEach([ + testFile, + testFile + '.1' + ], remove, function() { - //log file of 100 bytes maximum, no backups - log4js.configure({ - appenders: { - "file": { type: "file", filename: testFile, maxLogSize: 100, backups: 0 } - }, - categories: { + //log file of 100 bytes maximum, no backups + log4js.configure({ + appenders: { + "file": { type: "file", filename: testFile, maxLogSize: 100, backups: 0 } + }, + categories: { default: { level: "debug", appenders: [ "file" ] } - } + } + }); + logger.info("This is the first log message."); + logger.info("This is an intermediate log message."); + logger.info("This is the second log message."); + done(); }); - logger.info("This is the first log message."); - logger.info("This is an intermediate log message."); - logger.info("This is the second log message."); + }); + + after(function(done) { + async.forEach([ testFile, testFile + '.1' ], remove, done); }); describe('log file', function() { @@ -177,26 +191,32 @@ describe('log4js fileAppender', function() { describe('with a max file size and 2 backups', function() { var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log'); - before(function() { + before(function(done) { var logger = log4js.getLogger('max-file-size-backups'); - remove(testFile); - remove(testFile+'.1'); - remove(testFile+'.2'); + + async.forEach([ + testFile, + testFile+'.1', + testFile+'.2' + ], remove, function() { - 
//log file of 50 bytes maximum, 2 backups - log4js.configure({ - appenders: { - "file": { type: "file", filename: testFile, maxLogSize: 50, backups: 2 } - }, - categories: { + //log file of 50 bytes maximum, 2 backups + log4js.configure({ + appenders: { + "file": { type: "file", filename: testFile, maxLogSize: 50, backups: 2 } + }, + categories: { default: { level: "debug", appenders: [ "file" ] } - } - }); + } + }); + + logger.info("This is the first log message."); + logger.info("This is the second log message."); + logger.info("This is the third log message."); + logger.info("This is the fourth log message."); - logger.info("This is the first log message."); - logger.info("This is the second log message."); - logger.info("This is the third log message."); - logger.info("This is the fourth log message."); + done(); + }); }); describe('the log files', function() { @@ -219,6 +239,10 @@ describe('log4js fileAppender', function() { }, 200); }); + after(function(done) { + async.forEach(logFiles, remove, done); + }); + it('should be 3', function () { logFiles.should.have.length(3); }); diff --git a/test/with-dateFile.json b/test/with-dateFile.json index 18727433..31a23aae 100644 --- a/test/with-dateFile.json +++ b/test/with-dateFile.json @@ -1,7 +1,6 @@ { - "appenders": [ - { - "category": "tests", + "appenders": { + "dateFile": { "type": "dateFile", "filename": "test/date-file-test.log", "pattern": "-from-MM-dd", @@ -9,9 +8,9 @@ "type": "messagePassThrough" } } - ], + }, - "levels": { - "tests": "WARN" + "categories": { + "default": { "level": "WARN", "appenders": [ "dateFile" ] } } } From fe1f1228ca0dd3c2849a9b4447aa8c1e188ffb75 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 29 Aug 2013 08:42:22 +1000 Subject: [PATCH 39/53] removed underscore from dev dependencies --- package.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/package.json b/package.json index 6867d0d5..39969f55 100644 --- a/package.json +++ b/package.json @@ -21,7 +21,7 @@ "node": ">=0.8" }, "scripts": { - "test": "mocha" + "test": "mocha --recursive" }, "directories": { "test": "test", @@ -35,7 +35,6 @@ }, "devDependencies": { "sandboxed-module": "0.1.3", - "underscore": "1.2.1", "mocha": "~1.12.0", "should": "~1.2.2" }, From 46ad57b4e031cbcbb5f37f44230ab6aefbb5a956 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 29 Aug 2013 08:49:42 +1000 Subject: [PATCH 40/53] all tests converted to mocha --- test/clusteredAppender-test.js | 2 + test/dateFileAppender-test.js | 2 +- test/logger-test.js | 3 +- test/streams/BaseRollingFileStream-test.js | 97 +++-- test/streams/DateRollingFileStream-test.js | 345 ++++++++++-------- test/streams/rollingFileStream-test.js | 398 ++++++++++++--------- 6 files changed, 462 insertions(+), 385 deletions(-) diff --git a/test/clusteredAppender-test.js b/test/clusteredAppender-test.js index c60fdea0..34ba46f6 100755 --- a/test/clusteredAppender-test.js +++ b/test/clusteredAppender-test.js @@ -123,6 +123,8 @@ describe('log4js in a cluster', function() { 'process': { 'send': function(event) { events.push(event); + }, + 'env': { } } } diff --git a/test/dateFileAppender-test.js b/test/dateFileAppender-test.js index 883b2f9e..6ebf32bc 100644 --- a/test/dateFileAppender-test.js +++ b/test/dateFileAppender-test.js @@ -199,7 +199,7 @@ describe('../lib/appenders/dateFile', function() { done(err); } ); - }, 100); + }, 200); } ); }); diff --git a/test/logger-test.js b/test/logger-test.js index 832a8cd9..d2df5d03 100644 --- a/test/logger-test.js +++ b/test/logger-test.js @@ 
-1,5 +1,6 @@ "use strict"; var should = require('should') +, levels = require('../lib/levels') , Logger = require('../lib/logger'); describe('../lib/logger', function() { @@ -42,7 +43,7 @@ describe('../lib/logger', function() { it('should send log events to the dispatch delegate', function() { logger.debug("interesting thing"); event.should.have.property('category').equal('exciting category'); - event.should.have.property('level').equal('debug'); + event.should.have.property('level').equal(levels.DEBUG); event.should.have.property('data').eql(["interesting thing"]); event.should.have.property('startTime'); }); diff --git a/test/streams/BaseRollingFileStream-test.js b/test/streams/BaseRollingFileStream-test.js index a414d5a5..daf4b116 100644 --- a/test/streams/BaseRollingFileStream-test.js +++ b/test/streams/BaseRollingFileStream-test.js @@ -1,12 +1,12 @@ "use strict"; -var vows = require('vows') -, assert = require('assert') +var should = require('should') , fs = require('fs') , sandbox = require('sandboxed-module'); -vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({ - 'when node version < 0.10.0': { - topic: function() { +describe('../../lib/streams/BaseRollingFileStream', function() { + describe('when node version < 0.10.0', function() { + it('should use readable-stream to maintain compatibility', function() { + var streamLib = sandbox.load( '../../lib/streams/BaseRollingFileStream', { @@ -22,16 +22,14 @@ vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({ } } ); - return streamLib.required; - }, - 'it should use readable-stream to maintain compatibility': function(required) { - assert.ok(required['readable-stream']); - assert.ok(!required.stream); - } - }, - 'when node version > 0.10.0': { - topic: function() { + streamLib.required.should.have.property('readable-stream'); + streamLib.required.should.not.have.property('stream'); + }); + }); + + describe('when node version > 0.10.0', function() { + it('should use the core stream module', function() { var streamLib = sandbox.load( '../../lib/streams/BaseRollingFileStream', { @@ -47,47 +45,42 @@ vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({ } } ); - return streamLib.required; - }, - 'it should use the core stream module': function(required) { - assert.ok(required.stream); - assert.ok(!required['readable-stream']); - } - }, - 'when no filename is passed': { - topic: require('../../lib/streams/BaseRollingFileStream'), - 'it should throw an error': function(BaseRollingFileStream) { - try { + streamLib.required.should.have.property('stream'); + streamLib.required.should.not.have.property('readable-stream'); + }); + }); + + describe('when no filename is passed', function() { + it('should throw an error', function() { + var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream'); + (function() { new BaseRollingFileStream(); - assert.fail('should not get here'); - } catch (e) { - assert.ok(e); - } - } - }, + }).should.throw(); + }); + }); + + describe('default behaviour', function() { + var stream; + + before(function() { + var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream'); + stream = new BaseRollingFileStream('basetest.log'); + }); + + after(function(done) { + fs.unlink('basetest.log', done); + }); + + it('should not want to roll', function() { + stream.shouldRoll().should.be.false; + }); - 'default behaviour': { - topic: function() { - var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream') - , stream = new 
BaseRollingFileStream('basetest.log'); - return stream; - }, - teardown: function() { - try { - fs.unlink('basetest.log'); - } catch (e) { - console.error("could not remove basetest.log", e); - } - }, - 'it should not want to roll': function(stream) { - assert.isFalse(stream.shouldRoll()); - }, - 'it should not roll': function(stream) { + it('should not roll', function() { var cbCalled = false; //just calls the callback straight away, no async calls stream.roll('basetest.log', function() { cbCalled = true; }); - assert.isTrue(cbCalled); - } - } -}).exportTo(module); + cbCalled.should.be.true; + }); + }); +}); diff --git a/test/streams/DateRollingFileStream-test.js b/test/streams/DateRollingFileStream-test.js index 33f014b2..92fe2696 100644 --- a/test/streams/DateRollingFileStream-test.js +++ b/test/streams/DateRollingFileStream-test.js @@ -1,6 +1,5 @@ "use strict"; -var vows = require('vows') -, assert = require('assert') +var should = require('should') , fs = require('fs') , semver = require('semver') , streams @@ -14,214 +13,242 @@ if (semver.satisfies(process.version, '>=0.10.0')) { } DateRollingFileStream = require('../../lib/streams').DateRollingFileStream; -function cleanUp(filename) { - return function() { - fs.unlink(filename); - }; +function remove(filename, cb) { + fs.unlink(filename, function() { cb(); }); } function now() { return testTime.getTime(); } -vows.describe('DateRollingFileStream').addBatch({ - 'arguments': { - topic: new DateRollingFileStream( +describe('DateRollingFileStream', function() { + describe('arguments', function() { + var stream = new DateRollingFileStream( __dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh' - ), - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'), + ); + + after(function(done) { + remove(__dirname + '/test-date-rolling-file-stream-1', done); + }); - 'should take a filename and a pattern and return a WritableStream': function(stream) { - assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1'); - assert.equal(stream.pattern, 'yyyy-mm-dd.hh'); - assert.instanceOf(stream, streams.Writable); - }, - 'with default settings for the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, 420); - assert.equal(stream.theStream.flags, 'a'); + it('should take a filename and a pattern and return a WritableStream', function() { + stream.filename.should.eql(__dirname + '/test-date-rolling-file-stream-1'); + stream.pattern.should.eql('yyyy-mm-dd.hh'); + stream.should.be.instanceOf(streams.Writable); + }); + + it('with default settings for the underlying stream', function() { + stream.theStream.mode.should.eql(420); + stream.theStream.flags.should.eql('a'); //encoding is not available on the underlying stream //assert.equal(stream.encoding, 'utf8'); - } - }, + }); + }); - 'default arguments': { - topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'), - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'), + describe('default arguments', function() { + var stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'); + + after(function(done) { + remove(__dirname + '/test-date-rolling-file-stream-2', done); + }); - 'pattern should be .yyyy-MM-dd': function(stream) { - assert.equal(stream.pattern, '.yyyy-MM-dd'); - } - }, + it('should have pattern of .yyyy-MM-dd', function() { + stream.pattern.should.eql('.yyyy-MM-dd'); + }); + }); - 'with stream arguments': { - topic: new DateRollingFileStream( + describe('with stream 
arguments', function() { + var stream = new DateRollingFileStream( __dirname + '/test-date-rolling-file-stream-3', 'yyyy-MM-dd', { mode: parseInt('0666', 8) } - ), - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'), + ); + + + after(function(done) { + remove(__dirname + '/test-date-rolling-file-stream-3', done); + }); - 'should pass them to the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, parseInt('0666', 8)); - } - }, + it('should pass them to the underlying stream', function() { + stream.theStream.mode.should.eql(parseInt('0666', 8)); + }); + }); - 'with stream arguments but no pattern': { - topic: new DateRollingFileStream( + describe('with stream arguments but no pattern', function() { + var stream = new DateRollingFileStream( __dirname + '/test-date-rolling-file-stream-4', { mode: parseInt('0666', 8) } - ), - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'), + ); + + after(function(done) { + remove(__dirname + '/test-date-rolling-file-stream-4', done); + }); - 'should pass them to the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, parseInt('0666', 8)); - }, - 'should use default pattern': function(stream) { - assert.equal(stream.pattern, '.yyyy-MM-dd'); - } - }, - - 'with a pattern of .yyyy-MM-dd': { - topic: function() { - var that = this, + it('should pass them to the underlying stream', function() { + stream.theStream.mode.should.eql(parseInt('0666', 8)); + }); + + it('should use default pattern', function() { + stream.pattern.should.eql('.yyyy-MM-dd'); + }); + }); + + describe('with a pattern of .yyyy-MM-dd', function() { + + var stream; + + before(function(done) { stream = new DateRollingFileStream( __dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now ); - stream.write("First message\n", 'utf8', function() { - that.callback(null, stream); - }); - }, - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'), + stream.write("First message\n", 'utf8', done); + }); + + after(function(done) { + remove(__dirname + '/test-date-rolling-file-stream-5', done); + }); - 'should create a file with the base name': { - topic: function(stream) { - fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback); - }, - 'file should contain first message': function(result) { - assert.equal(result.toString(), "First message\n"); - } - }, - - 'when the day changes': { - topic: function(stream) { + it('should create a file with the base name', function(done) { + fs.readFile(__dirname + '/test-date-rolling-file-stream-5', 'utf8', function(err, contents) { + contents.should.eql("First message\n"); + done(err); + }); + }); + + describe('when the day changes', function() { + + before(function(done) { testTime = new Date(2012, 8, 13, 0, 10, 12); - stream.write("Second message\n", 'utf8', this.callback); - }, - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'), + stream.write("Second message\n", 'utf8', done); + }); + + after(function(done) { + remove(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', done); + }); + + describe('the number of files', function() { + var files = []; + + before(function(done) { + fs.readdir(__dirname, function(err, list) { + files = list; + done(err); + }); + }); + it('should be two', function() { + files.filter( + function(file) { + return file.indexOf('test-date-rolling-file-stream-5') > -1; + } + ).should.have.length(2); + }); + }); - 'the number of files': { - topic: function() { - fs.readdir(__dirname, 
this.callback); - }, - 'should be two': function(files) { - assert.equal( - files.filter( - function(file) { - return file.indexOf('test-date-rolling-file-stream-5') > -1; - } - ).length, - 2 + describe('the file without a date', function() { + it('should contain the second message', function(done) { + fs.readFile( + __dirname + '/test-date-rolling-file-stream-5', 'utf8', + function(err, contents) { + contents.should.eql("Second message\n"); + done(err); + } ); - } - }, - - 'the file without a date': { - topic: function() { - fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback); - }, - 'should contain the second message': function(contents) { - assert.equal(contents.toString(), "Second message\n"); - } - }, + }); + }); - 'the file with the date': { - topic: function() { - fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback); - }, - 'should contain the first message': function(contents) { - assert.equal(contents.toString(), "First message\n"); - } - } - } - }, - - 'with alwaysIncludePattern': { - topic: function() { - var that = this, + describe('the file with the date', function() { + it('should contain the first message', function(done) { + fs.readFile( + __dirname + '/test-date-rolling-file-stream-5.2012-09-12', 'utf8', + function(err, contents) { + contents.should.eql("First message\n"); + done(err); + } + ); + }); + }); + }); + }); + + describe('with alwaysIncludePattern', function() { + var stream; + + before(function(done) { testTime = new Date(2012, 8, 12, 0, 10, 12), stream = new DateRollingFileStream( __dirname + '/test-date-rolling-file-stream-pattern', '.yyyy-MM-dd', - {alwaysIncludePattern: true}, + { alwaysIncludePattern: true }, now ); - stream.write("First message\n", 'utf8', function() { - that.callback(null, stream); - }); - }, - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12'), + stream.write("First message\n", 'utf8', done); + }); + + after(function(done) { + remove(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', done); + }); - 'should create a file with the pattern set': { - topic: function(stream) { - fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback); - }, - 'file should contain first message': function(result) { - assert.equal(result.toString(), "First message\n"); - } - }, + it('should create a file with the pattern set', function(done) { + fs.readFile( + __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', 'utf8', + function(err, contents) { + contents.should.eql("First message\n"); + done(err); + } + ); + }); - 'when the day changes': { - topic: function(stream) { + describe('when the day changes', function() { + before(function(done) { testTime = new Date(2012, 8, 13, 0, 10, 12); - stream.write("Second message\n", 'utf8', this.callback); - }, - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13'), + stream.write("Second message\n", 'utf8', done); + }); + + after(function(done) { + remove(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13', done); + }); - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be two': function(files) { - assert.equal( + describe('the number of files', function() { + it('should be two', function(done) { + fs.readdir(__dirname, function(err, files) { files.filter( function(file) { return file.indexOf('test-date-rolling-file-stream-pattern') > -1; } - ).length, - 2 - ); - } - }, + 
).should.have.length(2); + done(err); + }); + }); + }); - 'the file with the later date': { - topic: function() { + describe('the file with the later date', function() { + it('should contain the second message', function(done) { fs.readFile( - __dirname + '/test-date-rolling-file-stream-pattern.2012-09-13', - this.callback + __dirname + '/test-date-rolling-file-stream-pattern.2012-09-13', 'utf8', + function(err, contents) { + contents.should.eql("Second message\n"); + done(err); + } ); - }, - 'should contain the second message': function(contents) { - assert.equal(contents.toString(), "Second message\n"); - } - }, + }); + }); - 'the file with the date': { - topic: function() { + describe('the file with the date', function() { + it('should contain the first message', function(done) { fs.readFile( - __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', - this.callback + __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', 'utf8', + function(err, contents) { + contents.should.eql("First message\n"); + done(err); + } ); - }, - 'should contain the first message': function(contents) { - assert.equal(contents.toString(), "First message\n"); - } - } - } - } - -}).exportTo(module); + }); + }); + }); + }); +}); diff --git a/test/streams/rollingFileStream-test.js b/test/streams/rollingFileStream-test.js index f39c2dc0..eb3465da 100644 --- a/test/streams/rollingFileStream-test.js +++ b/test/streams/rollingFileStream-test.js @@ -1,8 +1,6 @@ "use strict"; -var vows = require('vows') -, async = require('async') -, assert = require('assert') -, events = require('events') +var async = require('async') +, should = require('should') , fs = require('fs') , semver = require('semver') , streams @@ -15,196 +13,252 @@ if (semver.satisfies(process.version, '>=0.10.0')) { } RollingFileStream = require('../../lib/streams').RollingFileStream; -function remove(filename) { - try { - fs.unlinkSync(filename); - } catch (e) { - //doesn't really matter if it failed - } +function remove(filename, cb) { + fs.unlink(filename, function() { cb(); }); } -function create(filename) { - fs.writeFileSync(filename, "test file"); +function create(filename, cb) { + fs.writeFile(filename, "test file", cb); } -vows.describe('RollingFileStream').addBatch({ - 'arguments': { - topic: function() { - remove(__dirname + "/test-rolling-file-stream"); - return new RollingFileStream("test-rolling-file-stream", 1024, 5); - }, - 'should take a filename, file size (bytes), no. backups, return Writable': function(stream) { - assert.instanceOf(stream, streams.Writable); - assert.equal(stream.filename, "test-rolling-file-stream"); - assert.equal(stream.size, 1024); - assert.equal(stream.backups, 5); - }, - 'with default settings for the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, 420); - assert.equal(stream.theStream.flags, 'a'); +describe('RollingFileStream', function() { + + describe('arguments', function() { + var stream; + + before(function(done) { + remove(__dirname + "/test-rolling-file-stream", function() { + stream = new RollingFileStream("test-rolling-file-stream", 1024, 5); + done(); + }); + }); + + after(function(done) { + remove(__dirname + "/test-rolling-file-stream", done); + }); + + it('should take a filename, file size (bytes), no. 
backups, return Writable', function() { + stream.should.be.an.instanceOf(streams.Writable); + stream.filename.should.eql("test-rolling-file-stream"); + stream.size.should.eql(1024); + stream.backups.should.eql(5); + }); + + it('should apply default settings to the underlying stream', function() { + stream.theStream.mode.should.eql(420); + stream.theStream.flags.should.eql('a'); //encoding isn't a property on the underlying stream //assert.equal(stream.theStream.encoding, 'utf8'); - } - }, - 'with stream arguments': { - topic: function() { - remove(__dirname + '/test-rolling-file-stream'); - return new RollingFileStream( + }); + }); + + describe('with stream arguments', function() { + it('should pass them to the underlying stream', function() { + var stream = new RollingFileStream( 'test-rolling-file-stream', 1024, 5, { mode: parseInt('0666', 8) } ); - }, - 'should pass them to the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, parseInt('0666', 8)); - } - }, - 'without size': { - topic: function() { - try { + stream.theStream.mode.should.eql(parseInt('0666', 8)); + }); + + after(function(done) { + remove(__dirname + '/test-rolling-file-stream', done); + }); + }); + + describe('without size', function() { + it('should throw an error', function() { + (function() { new RollingFileStream(__dirname + "/test-rolling-file-stream"); - } catch (e) { - return e; - } - }, - 'should throw an error': function(err) { - assert.instanceOf(err, Error); - } - }, - 'without number of backups': { - topic: function() { - remove('test-rolling-file-stream'); - return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024); - }, - 'should default to 1 backup': function(stream) { - assert.equal(stream.backups, 1); - } - }, - 'writing less than the file size': { - topic: function() { - remove(__dirname + "/test-rolling-file-stream-write-less"); - var that = this - , stream = new RollingFileStream( - __dirname + "/test-rolling-file-stream-write-less", - 100 - ); - stream.write("cheese", "utf8", function() { - stream.end(); - fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback); - }); - }, - 'should write to the file': function(contents) { - assert.equal(contents, "cheese"); - }, - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be one': function(files) { - assert.equal( - files.filter( - function(file) { - return file.indexOf('test-rolling-file-stream-write-less') > -1; - } - ).length, - 1 + }).should.throw(); + }); + }); + + describe('without number of backups', function() { + it('should default to 1 backup', function() { + var stream = new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024); + stream.backups.should.eql(1); + }); + + after(function(done) { + remove(__dirname + "/test-rolling-file-stream", done); + }); + }); + + describe('writing less than the file size', function() { + + before(function(done) { + remove(__dirname + "/test-rolling-file-stream-write-less", function() { + var stream = new RollingFileStream( + __dirname + "/test-rolling-file-stream-write-less", + 100 ); - } - } - }, - 'writing more than the file size': { - topic: function() { - remove(__dirname + "/test-rolling-file-stream-write-more"); - remove(__dirname + "/test-rolling-file-stream-write-more.1"); - var that = this - , stream = new RollingFileStream( - __dirname + "/test-rolling-file-stream-write-more", - 45 + stream.write("cheese", "utf8", function() { + stream.end(done); + }); + }); + }); + 
+ after(function(done) { + remove(__dirname + "/test-rolling-file-stream-write-less", done); + }); + + it('should write to the file', function(done) { + fs.readFile( + __dirname + "/test-rolling-file-stream-write-less", "utf8", + function(err, contents) { + contents.should.eql("cheese"); + done(err); + } ); + }); + + it('should write one file', function(done) { + fs.readdir(__dirname, function(err, files) { + files.filter( + function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; } + ).should.have.length(1); + done(err); + }); + }); + }); + + describe('writing more than the file size', function() { + before(function(done) { async.forEach( - [0, 1, 2, 3, 4, 5, 6], - function(i, cb) { - stream.write(i +".cheese\n", "utf8", cb); - }, + [ + __dirname + "/test-rolling-file-stream-write-more", + __dirname + "/test-rolling-file-stream-write-more.1" + ], + remove, function() { - stream.end(); - that.callback(); + var stream = new RollingFileStream( + __dirname + "/test-rolling-file-stream-write-more", + 45 + ); + async.forEachSeries( + [0, 1, 2, 3, 4, 5, 6], + function(i, cb) { + stream.write(i +".cheese\n", "utf8", cb); + }, + function() { + stream.end(done); + } + ); } ); - }, - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be two': function(files) { - assert.equal(files.filter( + }); + + after(function(done) { + async.forEach( + [ + __dirname + "/test-rolling-file-stream-write-more", + __dirname + "/test-rolling-file-stream-write-more.1" + ], + remove, + done + ); + }); + + it('should write two files' , function(done) { + fs.readdir(__dirname, function(err, files) { + files.filter( function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; } - ).length, 2); - } - }, - 'the first file': { - topic: function() { - fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback); - }, - 'should contain the last two log messages': function(contents) { - assert.equal(contents, '5.cheese\n6.cheese\n'); - } - }, - 'the second file': { - topic: function() { - fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback); - }, - 'should contain the first five log messages': function(contents) { - assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n'); - } - } - }, - 'when many files already exist': { - topic: function() { - remove(__dirname + '/test-rolling-stream-with-existing-files.11'); - remove(__dirname + '/test-rolling-stream-with-existing-files.20'); - remove(__dirname + '/test-rolling-stream-with-existing-files.-1'); - remove(__dirname + '/test-rolling-stream-with-existing-files.1.1'); - remove(__dirname + '/test-rolling-stream-with-existing-files.1'); - - - create(__dirname + '/test-rolling-stream-with-existing-files.11'); - create(__dirname + '/test-rolling-stream-with-existing-files.20'); - create(__dirname + '/test-rolling-stream-with-existing-files.-1'); - create(__dirname + '/test-rolling-stream-with-existing-files.1.1'); - create(__dirname + '/test-rolling-stream-with-existing-files.1'); - - var that = this - , stream = new RollingFileStream( - __dirname + "/test-rolling-stream-with-existing-files", - 45, - 5 + ).should.have.length(2); + done(err); + }); + }); + + it('should write the last two log messages to the first file', function(done) { + fs.readFile( + __dirname + "/test-rolling-file-stream-write-more", "utf8", + function(err, contents) { + contents.should.eql('5.cheese\n6.cheese\n'); + done(err); + }); + }); 
+ + it('should write the first five log messages to the second file', function(done) { + fs.readFile( + __dirname + '/test-rolling-file-stream-write-more.1', "utf8", + function(err, contents) { + contents.should.eql('0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n'); + done(err); + } ); + }); + }); + + describe('when many files already exist', function() { + before(function(done) { async.forEach( - [0, 1, 2, 3, 4, 5, 6], - function(i, cb) { - stream.write(i +".cheese\n", "utf8", cb); - }, - function() { - stream.end(); - that.callback(); + [ + __dirname + '/test-rolling-stream-with-existing-files.11', + __dirname + '/test-rolling-stream-with-existing-files.20', + __dirname + '/test-rolling-stream-with-existing-files.-1', + __dirname + '/test-rolling-stream-with-existing-files.1.1', + __dirname + '/test-rolling-stream-with-existing-files.1' + ], + remove, + function(err) { + if (err) done(err); + + async.forEach( + [ + __dirname + '/test-rolling-stream-with-existing-files.11', + __dirname + '/test-rolling-stream-with-existing-files.20', + __dirname + '/test-rolling-stream-with-existing-files.-1', + __dirname + '/test-rolling-stream-with-existing-files.1.1', + __dirname + '/test-rolling-stream-with-existing-files.1' + ], + create, + function(err) { + if (err) done(err); + + var stream = new RollingFileStream( + __dirname + "/test-rolling-stream-with-existing-files", + 45, + 5 + ); + + async.forEachSeries( + [0, 1, 2, 3, 4, 5, 6], + function(i, cb) { + stream.write(i +".cheese\n", "utf8", cb); + }, + function() { + stream.end(done); + } + ); + } + ); } ); - }, - 'the files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be rolled': function(files) { - assert.include(files, 'test-rolling-stream-with-existing-files'); - assert.include(files, 'test-rolling-stream-with-existing-files.1'); - assert.include(files, 'test-rolling-stream-with-existing-files.2'); - assert.include(files, 'test-rolling-stream-with-existing-files.11'); - assert.include(files, 'test-rolling-stream-with-existing-files.20'); - } - } - } -}).exportTo(module); + }); + + after(function(done) { + async.forEach([ + 'test-rolling-stream-with-existing-files', + 'test-rolling-stream-with-existing-files.1', + 'test-rolling-stream-with-existing-files.2', + 'test-rolling-stream-with-existing-files.11', + 'test-rolling-stream-with-existing-files.20' + ], remove, done); + }); + + it('should roll the files', function(done) { + fs.readdir(__dirname, function(err, files) { + files.should.include('test-rolling-stream-with-existing-files'); + files.should.include('test-rolling-stream-with-existing-files.1'); + files.should.include('test-rolling-stream-with-existing-files.2'); + files.should.include('test-rolling-stream-with-existing-files.11'); + files.should.include('test-rolling-stream-with-existing-files.20'); + done(err); + }); + }); + }); +}); From 5e144e4004775198ca0f1ef5d2c0d5f30a49a5ac Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 29 Aug 2013 16:56:40 +1000 Subject: [PATCH 41/53] fixing some lint issues --- .bob.json | 6 +- .jshintrc | 5 +- lib/appenders/dateFile.js | 4 -- lib/layouts.js | 19 +++-- lib/log4js.js | 43 +++++++++--- test/categoryFilter-test.js | 5 +- test/clusteredAppender-test.js | 2 +- test/consoleAppender-test.js | 2 +- test/dateFileAppender-test.js | 1 + test/date_format-test.js | 14 +++- test/fileAppender-test.js | 17 +++-- test/layouts-test.js | 75 ++++++++++---------- test/levels-test.js | 5 ++ test/log4js-test.js | 81 ++++++++++++---------- 
test/streams/BaseRollingFileStream-test.js | 4 +- test/streams/DateRollingFileStream-test.js | 19 +++-- test/streams/rollingFileStream-test.js | 2 +- 17 files changed, 181 insertions(+), 123 deletions(-) diff --git a/.bob.json b/.bob.json index c8c1e02c..2ae59b4e 100644 --- a/.bob.json +++ b/.bob.json @@ -1,12 +1,12 @@ { - "build": "clean lint coverage test", + "build": "clean lint test coverage", "lint": { "type": "jshint" }, "coverage": { - "type": "vows" + "type": "mocha-istanbul" }, "test": { - "type": "vows" + "type": "mocha" } } diff --git a/.jshintrc b/.jshintrc index fe10cbad..bee01092 100644 --- a/.jshintrc +++ b/.jshintrc @@ -10,6 +10,9 @@ "maxlen": 100, "globals": { "describe": true, - "it": true + "it": true, + "before": true, + "beforeEach": true, + "after": true } } diff --git a/lib/appenders/dateFile.js b/lib/appenders/dateFile.js index 6317c688..ba4c2599 100644 --- a/lib/appenders/dateFile.js +++ b/lib/appenders/dateFile.js @@ -47,10 +47,6 @@ function configure(config, options) { config.alwaysIncludePattern = false; } - if (options && options.cwd && !config.absolute) { - config.filename = path.join(options.cwd, config.filename); - } - return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout); } diff --git a/lib/layouts.js b/lib/layouts.js index 9043448f..acda6a30 100644 --- a/lib/layouts.js +++ b/lib/layouts.js @@ -160,20 +160,17 @@ function patternLayout (pattern, tokens) { return loggerName; } + var formats = { + "ISO8601": dateFormat.ISO8601_FORMAT, + "ISO8601_WITH_TZ_OFFSET": dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, + "ABSOLUTE": dateFormat.ABSOLUTETIME_FORMAT, + "DATE": dateFormat.DATETIME_FORMAT + }; + function formatAsDate(loggingEvent, specifier) { var format = dateFormat.ISO8601_FORMAT; if (specifier) { - format = specifier; - // Pick up special cases - if (format == "ISO8601") { - format = dateFormat.ISO8601_FORMAT; - } else if (format == "ISO8601_WITH_TZ_OFFSET") { - format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT; - } else if (format == "ABSOLUTE") { - format = dateFormat.ABSOLUTETIME_FORMAT; - } else if (format == "DATE") { - format = dateFormat.DATETIME_FORMAT; - } + format = formats[specifier] || specifier; } // Format the date return dateFormat.asString(format, loggingEvent.startTime); diff --git a/lib/log4js.js b/lib/log4js.js index 3a112f89..48b4ce02 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -125,7 +125,14 @@ function workerDispatch(event) { function dispatch(event) { debug("event is ", event); var category = categories[event.category] || categories.default; - debug("category.level[", category.level, "] <= ", event.level, " ? ", category.level.isLessThanOrEqualTo(event.level)); + debug( + "category.level[", + category.level, + "] <= ", + event.level, + " ? ", + category.level.isLessThanOrEqualTo(event.level) + ); if (category.level.isLessThanOrEqualTo(event.level)) { category.appenders.forEach(function(appender) { @@ -182,7 +189,11 @@ function validateCategories(cats) { } category.level = levels.toLevel(inputLevel); if (!category.level) { - throw new Error("Level '" + inputLevel + "' is not valid for category '" + categoryName + "'. Acceptable values are: " + levels.levels.join(', ') + "."); + throw new Error( + "Level '" + inputLevel + + "' is not valid for category '" + categoryName + + "'. Acceptable values are: " + levels.levels.join(', ') + "." 
+ ); } if (!category.appenders || !category.appenders.length) { @@ -191,7 +202,11 @@ function validateCategories(cats) { category.appenders.forEach(function(appender) { if (!appenders[appender]) { - throw new Error("Appender '" + appender + "' for category '" + categoryName + "' does not exist. Known appenders are: " + Object.keys(appenders).join(', ') + "."); + throw new Error( + "Appender '" + appender + + "' for category '" + categoryName + + "' does not exist. Known appenders are: " + Object.keys(appenders).join(', ') + "." + ); } }); }); @@ -213,14 +228,20 @@ function appenderByName(name) { function configureAppenders(appenderMap) { clearAppenders(); Object.keys(appenderMap).forEach(function(appenderName) { - var appender, appenderConfig = appenderMap[appenderName]; - loadAppender(appenderConfig.type); - appenderConfig.makers = appenderMakers; - try { - appenders[appenderName] = appenderMakers[appenderConfig.type](appenderConfig, appenderByName); - } catch(e) { - throw new Error("log4js configuration problem for appender '" + appenderName + "'. Error was " + e.stack); - } + var appender, appenderConfig = appenderMap[appenderName]; + loadAppender(appenderConfig.type); + appenderConfig.makers = appenderMakers; + try { + appenders[appenderName] = appenderMakers[appenderConfig.type]( + appenderConfig, + appenderByName + ); + } catch(e) { + throw new Error( + "log4js configuration problem for appender '" + appenderName + + "'. Error was " + e.stack + ); + } }); } diff --git a/test/categoryFilter-test.js b/test/categoryFilter-test.js index 99a477c4..dcdc1112 100644 --- a/test/categoryFilter-test.js +++ b/test/categoryFilter-test.js @@ -85,7 +85,10 @@ describe('log4js', function() { 'utf8', function(err, contents) { var lines = contents.trim().split('\n'); - lines.should.eql(["This should not", "This shouldn't be included by the appender anyway"]); + lines.should.eql([ + "This should not", + "This shouldn't be included by the appender anyway" + ]); done(err); } ); diff --git a/test/clusteredAppender-test.js b/test/clusteredAppender-test.js index 34ba46f6..511a07dd 100755 --- a/test/clusteredAppender-test.js +++ b/test/clusteredAppender-test.js @@ -43,7 +43,7 @@ describe('log4js in a cluster', function() { }); it('should listen for fork events', function() { - clusterOnFork.should.be.true; + clusterOnFork.should.eql(true); }); it('should listen for messages from workers', function() { diff --git a/test/consoleAppender-test.js b/test/consoleAppender-test.js index c36e5094..c537326b 100644 --- a/test/consoleAppender-test.js +++ b/test/consoleAppender-test.js @@ -25,7 +25,7 @@ describe('../lib/appenders/console', function() { }); it('should output to console', function() { - messages.should.eql(["blah"]); + messages.should.eql(["blah"]); }); }); diff --git a/test/dateFileAppender-test.js b/test/dateFileAppender-test.js index 6ebf32bc..5082a590 100644 --- a/test/dateFileAppender-test.js +++ b/test/dateFileAppender-test.js @@ -1,4 +1,5 @@ "use strict"; +/*jshint expr:true */ var should = require('should') , async = require('async') , path = require('path') diff --git a/test/date_format-test.js b/test/date_format-test.js index de2c5460..06e46ecb 100644 --- a/test/date_format-test.js +++ b/test/date_format-test.js @@ -15,10 +15,20 @@ describe('date_format', function() { it('should provide a ISO8601 with timezone offset format', function() { date.getTimezoneOffset = function() { return -660; }; - dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, 
date).should.eql("2010-01-11T14:31:30+1100"); + dateFormat.asString( + dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, + date + ).should.eql( + "2010-01-11T14:31:30+1100" + ); date.getTimezoneOffset = function() { return 120; }; - dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date).should.eql("2010-01-11T14:31:30-0200"); + dateFormat.asString( + dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, + date + ).should.eql( + "2010-01-11T14:31:30-0200" + ); }); it('should provide a just-the-time format', function() { diff --git a/test/fileAppender-test.js b/test/fileAppender-test.js index 5e889ac2..aee123a7 100644 --- a/test/fileAppender-test.js +++ b/test/fileAppender-test.js @@ -1,4 +1,5 @@ "use strict"; +/*jshint expr:true */ var fs = require('fs') , async = require('async') , path = require('path') @@ -18,9 +19,12 @@ describe('log4js fileAppender', function() { before(function() { var logfile , count = 5 - , config = { appenders: {}, categories: { default: { level: "debug", appenders: ["file0"] } } }; + , config = { + appenders: {}, + categories: { default: { level: "debug", appenders: ["file0"] } } + }; - initialCount = process.listeners('exit').length + initialCount = process.listeners('exit').length; while (count--) { logfile = path.join(__dirname, '/fa-default-test' + count + '.log'); @@ -98,7 +102,7 @@ describe('log4js fileAppender', function() { "file": { type: "file", filename: testFile } }, categories: { - default: { level: "debug", appenders: [ "file" ] } + "default": { level: "debug", appenders: [ "file" ] } } }); @@ -147,7 +151,7 @@ describe('log4js fileAppender', function() { "file": { type: "file", filename: testFile, maxLogSize: 100, backups: 0 } }, categories: { - default: { level: "debug", appenders: [ "file" ] } + "default": { level: "debug", appenders: [ "file" ] } } }); logger.info("This is the first log message."); @@ -206,7 +210,7 @@ describe('log4js fileAppender', function() { "file": { type: "file", filename: testFile, maxLogSize: 50, backups: 2 } }, categories: { - default: { level: "debug", appenders: [ "file" ] } + "default": { level: "debug", appenders: [ "file" ] } } }); @@ -315,7 +319,8 @@ describe('log4js fileAppender', function() { } ); fileAppender.configure({ - filename: 'test1.log', maxLogSize: 100 + filename: 'test1.log', + maxLogSize: 100 }); errorHandler({ error: 'aargh' }); }); diff --git a/test/layouts-test.js b/test/layouts-test.js index 26bce675..bcf6b2d4 100644 --- a/test/layouts-test.js +++ b/test/layouts-test.js @@ -76,22 +76,19 @@ describe('log4js layouts', function() { }); it('should print the stacks of a passed error objects', function() { - assert.ok( - Array.isArray( - layout({ - data: [ new Error() ], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - category: "cheese", - level: { - colour: "green", - toString: function() { return "ERROR"; } - } - }).match( - /Error\s+at Context\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s/ - ) - ), - 'regexp did not return a match' - ); + assert.ok(Array.isArray( + layout({ + data: [ new Error() ], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + category: "cheese", + level: { + colour: "green", + toString: function() { return "ERROR"; } + } + }).match( + /Error\s+at Context\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s/ + ) + ), 'regexp did not return a match'); }); describe('with passed augmented errors', function() { @@ -203,7 +200,13 @@ describe('log4js layouts', function() { event.startTime.getTimezoneOffset = function() { return 0; }; it('should default to "time logLevel 
loggerName - message"', function() { - test(layout, event, tokens, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n"); + test( + layout, + event, + tokens, + null, + "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n" + ); }); it('%r should output time only', function() { @@ -295,24 +298,26 @@ describe('log4js layouts', function() { test(layout, event, tokens, '%[%r%]', '\x1B[36m14:18:30\x1B[39m'); }); - it('%x{testString} should output the string stored in tokens', function() { - test(layout, event, tokens, '%x{testString}', 'testStringToken'); - }); - - it('%x{testFunction} should output the result of the function stored in tokens', function() { - test(layout, event, tokens, '%x{testFunction}', 'testFunctionToken'); - }); - - it('%x{doesNotExist} should output the string stored in tokens', function() { - test(layout, event, tokens, '%x{doesNotExist}', '%x{doesNotExist}'); - }); - - it('%x{fnThatUsesLogEvent} should be able to use the logEvent', function() { - test(layout, event, tokens, '%x{fnThatUsesLogEvent}', 'DEBUG'); - }); - - it('%x should output the string stored in tokens', function() { - test(layout, event, tokens, '%x', '%x'); + describe('%x{}', function() { + it('%x{testString} should output the string stored in tokens', function() { + test(layout, event, tokens, '%x{testString}', 'testStringToken'); + }); + + it('%x{testFunction} should output the result of the function stored in tokens', function() { + test(layout, event, tokens, '%x{testFunction}', 'testFunctionToken'); + }); + + it('%x{doesNotExist} should output the string stored in tokens', function() { + test(layout, event, tokens, '%x{doesNotExist}', '%x{doesNotExist}'); + }); + + it('%x{fnThatUsesLogEvent} should be able to use the logEvent', function() { + test(layout, event, tokens, '%x{fnThatUsesLogEvent}', 'DEBUG'); + }); + + it('%x should output the string stored in tokens', function() { + test(layout, event, tokens, '%x', '%x'); + }); }); }); diff --git a/test/levels-test.js b/test/levels-test.js index f3fc6452..e901f268 100644 --- a/test/levels-test.js +++ b/test/levels-test.js @@ -417,6 +417,11 @@ describe('../lib/levels', function() { it('should return the default value if argument is not recognised', function() { levels.toLevel("cheese", levels.DEBUG).should.eql(levels.DEBUG); }); + + it('should return the default value if argument is falsy', function() { + levels.toLevel(undefined, levels.DEBUG).should.eql(levels.DEBUG); + levels.toLevel(null, levels.DEBUG).should.eql(levels.DEBUG); + }); }); }); diff --git a/test/log4js-test.js b/test/log4js-test.js index fe1658fc..d4571f68 100644 --- a/test/log4js-test.js +++ b/test/log4js-test.js @@ -54,40 +54,44 @@ describe('../lib/log4js', function() { }); - it('should complain if the config does not specify an appender for the default category', function() { - - (function() { - log4js.configure( - { + it( + 'should complain if the config does not specify an appender for the default category', + function() { + + (function() { + log4js.configure( + { + appenders: { + "console": { type: "console" } + }, + categories: {} + } + ); + }).should.throw( + "You must specify an appender for the default category" + ); + + (function() { + log4js.configure({ appenders: { "console": { type: "console" } - }, - categories: {} - } - ); - }).should.throw( - "You must specify an appender for the default category" - ); - - (function() { - log4js.configure({ - appenders: { - "console": { type: "console" } - }, - categories: { - "cheese": { level: "DEBUG", 
appenders: [ "console" ] } - } - }); - }).should.throw( - "You must specify an appender for the default category" - ); - - }); + }, + categories: { + "cheese": { level: "DEBUG", appenders: [ "console" ] } + } + }); + }).should.throw( + "You must specify an appender for the default category" + ); + + } + ); it('should complain if a category does not specify level or appenders', function() { (function() { log4js.configure( - { appenders: { "console": { type: "console" } }, + { + appenders: { "console": { type: "console" } }, categories: { "default": { thing: "thing" } } @@ -99,7 +103,8 @@ describe('../lib/log4js', function() { (function() { log4js.configure( - { appenders: { "console": { type: "console" } }, + { + appenders: { "console": { type: "console" } }, categories: { "default": { level: "DEBUG" } } @@ -113,7 +118,8 @@ describe('../lib/log4js', function() { it('should complain if a category specifies a level that does not exist', function() { (function() { log4js.configure( - { appenders: { "console": { type: "console" }}, + { + appenders: { "console": { type: "console" }}, categories: { "default": { level: "PICKLES" } } @@ -128,7 +134,8 @@ describe('../lib/log4js', function() { it('should complain if a category specifies an appender that does not exist', function() { (function() { log4js.configure( - { appenders: { "console": { type: "console" }}, + { + appenders: { "console": { type: "console" }}, categories: { "default": { level: "DEBUG", appenders: [ "cheese" ] } } @@ -212,8 +219,8 @@ describe('../lib/log4js', function() { it('should read config from a file', function() { var events = [], log4js_sandbox = sandbox.require( '../lib/log4js', - { requires: - { + { + requires: { 'cheese': { configure: function() { return function(event) { events.push(event); }; @@ -235,8 +242,8 @@ describe('../lib/log4js', function() { , noisyLogger , log4js_sandbox = sandbox.require( '../lib/log4js', - { requires: - { + { + requires: { 'cheese': { configure: function() { return function(event) { events.push(event); }; @@ -264,8 +271,8 @@ describe('../lib/log4js', function() { var events = [] , log4js_sandbox = sandbox.require( '../lib/log4js', - { requires: - { + { + requires: { 'cheese': { configure: function() { return function(event) { events.push(event); }; diff --git a/test/streams/BaseRollingFileStream-test.js b/test/streams/BaseRollingFileStream-test.js index daf4b116..cb4b2dbc 100644 --- a/test/streams/BaseRollingFileStream-test.js +++ b/test/streams/BaseRollingFileStream-test.js @@ -73,14 +73,14 @@ describe('../../lib/streams/BaseRollingFileStream', function() { }); it('should not want to roll', function() { - stream.shouldRoll().should.be.false; + stream.shouldRoll().should.eql(false); }); it('should not roll', function() { var cbCalled = false; //just calls the callback straight away, no async calls stream.roll('basetest.log', function() { cbCalled = true; }); - cbCalled.should.be.true; + cbCalled.should.eql(true); }); }); }); diff --git a/test/streams/DateRollingFileStream-test.js b/test/streams/DateRollingFileStream-test.js index 92fe2696..960443ec 100644 --- a/test/streams/DateRollingFileStream-test.js +++ b/test/streams/DateRollingFileStream-test.js @@ -178,14 +178,19 @@ describe('DateRollingFileStream', function() { var stream; before(function(done) { - testTime = new Date(2012, 8, 12, 0, 10, 12), - stream = new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-pattern', - '.yyyy-MM-dd', - { alwaysIncludePattern: true }, - now + testTime = new Date(2012, 8, 12, 0, 10, 
12); + remove( + __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', + function() { + stream = new DateRollingFileStream( + __dirname + '/test-date-rolling-file-stream-pattern', + '.yyyy-MM-dd', + { alwaysIncludePattern: true }, + now + ); + stream.write("First message\n", 'utf8', done); + } ); - stream.write("First message\n", 'utf8', done); }); after(function(done) { diff --git a/test/streams/rollingFileStream-test.js b/test/streams/rollingFileStream-test.js index eb3465da..18a20f2e 100644 --- a/test/streams/rollingFileStream-test.js +++ b/test/streams/rollingFileStream-test.js @@ -14,7 +14,7 @@ if (semver.satisfies(process.version, '>=0.10.0')) { RollingFileStream = require('../../lib/streams').RollingFileStream; function remove(filename, cb) { - fs.unlink(filename, function() { cb(); }); + fs.unlink(filename, function(err) { cb(); }); } function create(filename, cb) { From 72c4fb48dbaea8534431216cce809c2d919fe976 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 29 Aug 2013 22:15:50 +1000 Subject: [PATCH 42/53] extracted streams, date-format into separate modules --- lib/appenders/dateFile.js | 2 +- lib/appenders/file.js | 2 +- lib/date_format.js | 66 ------ lib/layouts.js | 2 +- lib/streams/BaseRollingFileStream.js | 84 ------- lib/streams/DateRollingFileStream.js | 88 ------- lib/streams/RollingFileStream.js | 89 ------- lib/streams/index.js | 2 - package.json | 4 +- test/dateFileAppender-test.js | 4 +- test/date_format-test.js | 42 ---- test/fileAppender-test.js | 4 +- test/streams/BaseRollingFileStream-test.js | 86 ------- test/streams/DateRollingFileStream-test.js | 259 -------------------- test/streams/rollingFileStream-test.js | 264 --------------------- 15 files changed, 10 insertions(+), 988 deletions(-) delete mode 100644 lib/date_format.js delete mode 100644 lib/streams/BaseRollingFileStream.js delete mode 100644 lib/streams/DateRollingFileStream.js delete mode 100644 lib/streams/RollingFileStream.js delete mode 100644 lib/streams/index.js delete mode 100644 test/date_format-test.js delete mode 100644 test/streams/BaseRollingFileStream-test.js delete mode 100644 test/streams/DateRollingFileStream-test.js delete mode 100644 test/streams/rollingFileStream-test.js diff --git a/lib/appenders/dateFile.js b/lib/appenders/dateFile.js index ba4c2599..b6b296de 100644 --- a/lib/appenders/dateFile.js +++ b/lib/appenders/dateFile.js @@ -1,5 +1,5 @@ "use strict"; -var streams = require('../streams') +var streams = require('streamroller') , layouts = require('../layouts') , path = require('path') , os = require('os') diff --git a/lib/appenders/file.js b/lib/appenders/file.js index c8566162..b4357187 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -2,7 +2,7 @@ var layouts = require('../layouts') , path = require('path') , fs = require('fs') -, streams = require('../streams') +, streams = require('streamroller') , os = require('os') , eol = os.EOL || '\n' , openFiles = []; diff --git a/lib/date_format.js b/lib/date_format.js deleted file mode 100644 index d75ce20d..00000000 --- a/lib/date_format.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; -exports.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS"; -exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ssO"; -exports.DATETIME_FORMAT = "dd MM yyyy hh:mm:ss.SSS"; -exports.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS"; - -function padWithZeros(vNumber, width) { - var numAsString = vNumber + ""; - while (numAsString.length < width) { - numAsString = "0" + numAsString; - } - return numAsString; -} - -function 
addZero(vNumber) { - return padWithZeros(vNumber, 2); -} - -/** - * Formats the TimeOffest - * Thanks to http://www.svendtofte.com/code/date_format/ - * @private - */ -function offset(date) { - // Difference to Greenwich time (GMT) in hours - var os = Math.abs(date.getTimezoneOffset()); - var h = String(Math.floor(os/60)); - var m = String(os%60); - if (h.length == 1) { - h = "0" + h; - } - if (m.length == 1) { - m = "0" + m; - } - return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m; -} - -exports.asString = function(/*format,*/ date) { - var format = exports.ISO8601_FORMAT; - if (typeof(date) === "string") { - format = arguments[0]; - date = arguments[1]; - } - - var vDay = addZero(date.getDate()); - var vMonth = addZero(date.getMonth()+1); - var vYearLong = addZero(date.getFullYear()); - var vYearShort = addZero(date.getFullYear().toString().substring(2,4)); - var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort); - var vHour = addZero(date.getHours()); - var vMinute = addZero(date.getMinutes()); - var vSecond = addZero(date.getSeconds()); - var vMillisecond = padWithZeros(date.getMilliseconds(), 3); - var vTimeZone = offset(date); - var formatted = format - .replace(/dd/g, vDay) - .replace(/MM/g, vMonth) - .replace(/y{1,4}/g, vYear) - .replace(/hh/g, vHour) - .replace(/mm/g, vMinute) - .replace(/ss/g, vSecond) - .replace(/SSS/g, vMillisecond) - .replace(/O/g, vTimeZone); - return formatted; - -}; diff --git a/lib/layouts.js b/lib/layouts.js index acda6a30..82c3397b 100644 --- a/lib/layouts.js +++ b/lib/layouts.js @@ -1,5 +1,5 @@ "use strict"; -var dateFormat = require('./date_format') +var dateFormat = require('date-format') , os = require('os') , eol = os.EOL || '\n' , util = require('util') diff --git a/lib/streams/BaseRollingFileStream.js b/lib/streams/BaseRollingFileStream.js deleted file mode 100644 index a9e4f14c..00000000 --- a/lib/streams/BaseRollingFileStream.js +++ /dev/null @@ -1,84 +0,0 @@ -"use strict"; -var fs = require('fs') -, stream -, debug = require('debug')('log4js:BaseRollingFileStream') -, util = require('util') -, semver = require('semver'); - -if (semver.satisfies(process.version, '>=0.10.0')) { - stream = require('stream'); -} else { - stream = require('readable-stream'); -} - -module.exports = BaseRollingFileStream; - -function BaseRollingFileStream(filename, options) { - debug("In BaseRollingFileStream"); - this.filename = filename; - this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' }; - this.currentSize = 0; - - function currentFileSize(file) { - var fileSize = 0; - try { - fileSize = fs.statSync(file).size; - } catch (e) { - // file does not exist - } - return fileSize; - } - - function throwErrorIfArgumentsAreNotValid() { - if (!filename) { - throw new Error("You must specify a filename"); - } - } - - throwErrorIfArgumentsAreNotValid(); - debug("Calling BaseRollingFileStream.super"); - BaseRollingFileStream.super_.call(this); - this.openTheStream(); - this.currentSize = currentFileSize(this.filename); -} -util.inherits(BaseRollingFileStream, stream.Writable); - -BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) { - var that = this; - function writeTheChunk() { - debug("writing the chunk to the underlying stream"); - that.currentSize += chunk.length; - that.theStream.write(chunk, encoding, callback); - } - - debug("in _write"); - - if (this.shouldRoll()) { - this.currentSize = 0; - this.roll(this.filename, writeTheChunk); - } else { - writeTheChunk(); - } -}; - 
-BaseRollingFileStream.prototype.openTheStream = function(cb) { - debug("opening the underlying stream"); - this.theStream = fs.createWriteStream(this.filename, this.options); - if (cb) { - this.theStream.on("open", cb); - } -}; - -BaseRollingFileStream.prototype.closeTheStream = function(cb) { - debug("closing the underlying stream"); - this.theStream.end(cb); -}; - -BaseRollingFileStream.prototype.shouldRoll = function() { - return false; // default behaviour is never to roll -}; - -BaseRollingFileStream.prototype.roll = function(filename, callback) { - callback(); // default behaviour is not to do anything -}; - diff --git a/lib/streams/DateRollingFileStream.js b/lib/streams/DateRollingFileStream.js deleted file mode 100644 index fc6dd936..00000000 --- a/lib/streams/DateRollingFileStream.js +++ /dev/null @@ -1,88 +0,0 @@ -"use strict"; -var BaseRollingFileStream = require('./BaseRollingFileStream') -, debug = require('debug')('log4js:DateRollingFileStream') -, format = require('../date_format') -, async = require('async') -, fs = require('fs') -, util = require('util'); - -module.exports = DateRollingFileStream; - -function DateRollingFileStream(filename, pattern, options, now) { - debug("Now is ", now); - if (pattern && typeof(pattern) === 'object') { - now = options; - options = pattern; - pattern = null; - } - this.pattern = pattern || '.yyyy-MM-dd'; - this.now = now || Date.now; - this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now())); - this.baseFilename = filename; - this.alwaysIncludePattern = false; - - if (options) { - if (options.alwaysIncludePattern) { - this.alwaysIncludePattern = true; - filename = this.baseFilename + this.lastTimeWeWroteSomething; - } - delete options.alwaysIncludePattern; - if (Object.keys(options).length === 0) { - options = null; - } - } - debug("this.now is ", this.now, ", now is ", now); - - DateRollingFileStream.super_.call(this, filename, options); -} -util.inherits(DateRollingFileStream, BaseRollingFileStream); - -DateRollingFileStream.prototype.shouldRoll = function() { - var lastTime = this.lastTimeWeWroteSomething, - thisTime = format.asString(this.pattern, new Date(this.now())); - - debug("DateRollingFileStream.shouldRoll with now = ", - this.now(), ", thisTime = ", thisTime, ", lastTime = ", lastTime); - - this.lastTimeWeWroteSomething = thisTime; - this.previousTime = lastTime; - - return thisTime !== lastTime; -}; - -DateRollingFileStream.prototype.roll = function(filename, callback) { - var that = this; - - debug("Starting roll"); - - if (this.alwaysIncludePattern) { - this.filename = this.baseFilename + this.lastTimeWeWroteSomething; - async.series([ - this.closeTheStream.bind(this), - this.openTheStream.bind(this) - ], callback); - } else { - var newFilename = this.baseFilename + this.previousTime; - async.series([ - this.closeTheStream.bind(this), - deleteAnyExistingFile, - renameTheCurrentFile, - this.openTheStream.bind(this) - ], callback); - } - - function deleteAnyExistingFile(cb) { - //on windows, you can get a EEXIST error if you rename a file to an existing file - //so, we'll try to delete the file we're renaming to first - fs.unlink(newFilename, function (err) { - //ignore err: if we could not delete, it's most likely that it doesn't exist - cb(); - }); - } - - function renameTheCurrentFile(cb) { - debug("Renaming the ", filename, " -> ", newFilename); - fs.rename(filename, newFilename, cb); - } - -}; diff --git a/lib/streams/RollingFileStream.js b/lib/streams/RollingFileStream.js deleted file mode 
100644 index 26d2bb17..00000000 --- a/lib/streams/RollingFileStream.js +++ /dev/null @@ -1,89 +0,0 @@ -"use strict"; -var BaseRollingFileStream = require('./BaseRollingFileStream') -, debug = require('debug')('log4js:RollingFileStream') -, util = require('util') -, path = require('path') -, fs = require('fs') -, async = require('async'); - -module.exports = RollingFileStream; - -function RollingFileStream (filename, size, backups, options) { - this.size = size; - this.backups = backups || 1; - - function throwErrorIfArgumentsAreNotValid() { - if (!filename || !size || size <= 0) { - throw new Error("You must specify a filename and file size"); - } - } - - throwErrorIfArgumentsAreNotValid(); - - RollingFileStream.super_.call(this, filename, options); -} -util.inherits(RollingFileStream, BaseRollingFileStream); - -RollingFileStream.prototype.shouldRoll = function() { - debug("should roll with current size %d, and max size %d", this.currentSize, this.size); - return this.currentSize >= this.size; -}; - -RollingFileStream.prototype.roll = function(filename, callback) { - var that = this, - nameMatcher = new RegExp('^' + path.basename(filename)); - - function justTheseFiles (item) { - return nameMatcher.test(item); - } - - function index(filename_) { - return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0; - } - - function byIndex(a, b) { - if (index(a) > index(b)) { - return 1; - } else if (index(a) < index(b) ) { - return -1; - } else { - return 0; - } - } - - function increaseFileIndex (fileToRename, cb) { - var idx = index(fileToRename); - debug('Index of ', fileToRename, ' is ', idx); - if (idx < that.backups) { - //on windows, you can get a EEXIST error if you rename a file to an existing file - //so, we'll try to delete the file we're renaming to first - fs.unlink(filename + '.' + (idx+1), function (err) { - //ignore err: if we could not delete, it's most likely that it doesn't exist - debug('Renaming ', fileToRename, ' -> ', filename, '.', (idx+1)); - fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' 
+ (idx + 1), cb); - }); - } else { - cb(); - } - } - - function renameTheFiles(cb) { - //roll the backups (rename file.n to file.n+1, where n <= numBackups) - debug("Renaming the old files"); - fs.readdir(path.dirname(filename), function (err, files) { - async.forEachSeries( - files.filter(justTheseFiles).sort(byIndex).reverse(), - increaseFileIndex, - cb - ); - }); - } - - debug("Rolling, rolling, rolling"); - async.series([ - this.closeTheStream.bind(this), - renameTheFiles, - this.openTheStream.bind(this) - ], callback); - -}; diff --git a/lib/streams/index.js b/lib/streams/index.js deleted file mode 100644 index f9f57b99..00000000 --- a/lib/streams/index.js +++ /dev/null @@ -1,2 +0,0 @@ -exports.RollingFileStream = require('./RollingFileStream'); -exports.DateRollingFileStream = require('./DateRollingFileStream'); diff --git a/package.json b/package.json index 39969f55..a26c7375 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,9 @@ "async": "0.1.15", "semver": "~1.1.4", "readable-stream": "~1.0.2", - "debug": "~0.7.2" + "debug": "~0.7.2", + "streamroller": "0.0.0", + "date-format": "0.0.0" }, "devDependencies": { "sandboxed-module": "0.1.3", diff --git a/test/dateFileAppender-test.js b/test/dateFileAppender-test.js index 5082a590..38b3d00e 100644 --- a/test/dateFileAppender-test.js +++ b/test/dateFileAppender-test.js @@ -58,7 +58,7 @@ describe('../lib/appenders/dateFile', function() { } }, requires: { - '../streams': { + 'streamroller': { DateRollingFileStream: function(filename) { openedFiles.push(filename); @@ -164,7 +164,7 @@ describe('../lib/appenders/dateFile', function() { before(function(done) { var log4js = require('../lib/log4js') - , format = require('../lib/date_format') + , format = require('date-format') , logger , options = { "appenders": { diff --git a/test/date_format-test.js b/test/date_format-test.js deleted file mode 100644 index 06e46ecb..00000000 --- a/test/date_format-test.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; -var should = require('should') -, dateFormat = require('../lib/date_format'); - -describe('date_format', function() { - var date = new Date(2010, 0, 11, 14, 31, 30, 5); - - it('should format a date as string using a pattern', function() { - dateFormat.asString(dateFormat.DATETIME_FORMAT, date).should.eql("11 01 2010 14:31:30.005"); - }); - - it('should default to the ISO8601 format', function() { - dateFormat.asString(date).should.eql('2010-01-11 14:31:30.005'); - }); - - it('should provide a ISO8601 with timezone offset format', function() { - date.getTimezoneOffset = function() { return -660; }; - dateFormat.asString( - dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, - date - ).should.eql( - "2010-01-11T14:31:30+1100" - ); - - date.getTimezoneOffset = function() { return 120; }; - dateFormat.asString( - dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, - date - ).should.eql( - "2010-01-11T14:31:30-0200" - ); - }); - - it('should provide a just-the-time format', function() { - dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date).should.eql('14:31:30.005'); - }); - - it('should provide a custom format', function() { - date.getTimezoneOffset = function() { return 120; }; - dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date).should.eql('-0200.005.30.31.14.11.01.10'); - }); -}); diff --git a/test/fileAppender-test.js b/test/fileAppender-test.js index aee123a7..20401f13 100644 --- a/test/fileAppender-test.js +++ b/test/fileAppender-test.js @@ -62,7 +62,7 @@ describe('log4js fileAppender', function() { } }, requires: { - '../streams': { + 
'streamroller': { RollingFileStream: function(filename) { openedFiles.push(filename); @@ -304,7 +304,7 @@ describe('log4js fileAppender', function() { } }, requires: { - '../streams': { + 'streamroller': { RollingFileStream: function(filename) { this.end = function() {}; diff --git a/test/streams/BaseRollingFileStream-test.js b/test/streams/BaseRollingFileStream-test.js deleted file mode 100644 index cb4b2dbc..00000000 --- a/test/streams/BaseRollingFileStream-test.js +++ /dev/null @@ -1,86 +0,0 @@ -"use strict"; -var should = require('should') -, fs = require('fs') -, sandbox = require('sandboxed-module'); - -describe('../../lib/streams/BaseRollingFileStream', function() { - describe('when node version < 0.10.0', function() { - it('should use readable-stream to maintain compatibility', function() { - - var streamLib = sandbox.load( - '../../lib/streams/BaseRollingFileStream', - { - globals: { - process: { - version: '0.8.11' - } - }, - requires: { - 'readable-stream': { - Writable: function() {} - } - } - } - ); - - streamLib.required.should.have.property('readable-stream'); - streamLib.required.should.not.have.property('stream'); - }); - }); - - describe('when node version > 0.10.0', function() { - it('should use the core stream module', function() { - var streamLib = sandbox.load( - '../../lib/streams/BaseRollingFileStream', - { - globals: { - process: { - version: '0.10.1' - } - }, - requires: { - 'stream': { - Writable: function() {} - } - } - } - ); - - streamLib.required.should.have.property('stream'); - streamLib.required.should.not.have.property('readable-stream'); - }); - }); - - describe('when no filename is passed', function() { - it('should throw an error', function() { - var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream'); - (function() { - new BaseRollingFileStream(); - }).should.throw(); - }); - }); - - describe('default behaviour', function() { - var stream; - - before(function() { - var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream'); - stream = new BaseRollingFileStream('basetest.log'); - }); - - after(function(done) { - fs.unlink('basetest.log', done); - }); - - it('should not want to roll', function() { - stream.shouldRoll().should.eql(false); - }); - - it('should not roll', function() { - var cbCalled = false; - //just calls the callback straight away, no async calls - stream.roll('basetest.log', function() { cbCalled = true; }); - cbCalled.should.eql(true); - }); - }); -}); diff --git a/test/streams/DateRollingFileStream-test.js b/test/streams/DateRollingFileStream-test.js deleted file mode 100644 index 960443ec..00000000 --- a/test/streams/DateRollingFileStream-test.js +++ /dev/null @@ -1,259 +0,0 @@ -"use strict"; -var should = require('should') -, fs = require('fs') -, semver = require('semver') -, streams -, DateRollingFileStream -, testTime = new Date(2012, 8, 12, 10, 37, 11); - -if (semver.satisfies(process.version, '>=0.10.0')) { - streams = require('stream'); -} else { - streams = require('readable-stream'); -} -DateRollingFileStream = require('../../lib/streams').DateRollingFileStream; - -function remove(filename, cb) { - fs.unlink(filename, function() { cb(); }); -} - -function now() { - return testTime.getTime(); -} - -describe('DateRollingFileStream', function() { - describe('arguments', function() { - var stream = new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-1', - 'yyyy-mm-dd.hh' - ); - - after(function(done) { - remove(__dirname + '/test-date-rolling-file-stream-1', done); 
- }); - - it('should take a filename and a pattern and return a WritableStream', function() { - stream.filename.should.eql(__dirname + '/test-date-rolling-file-stream-1'); - stream.pattern.should.eql('yyyy-mm-dd.hh'); - stream.should.be.instanceOf(streams.Writable); - }); - - it('with default settings for the underlying stream', function() { - stream.theStream.mode.should.eql(420); - stream.theStream.flags.should.eql('a'); - //encoding is not available on the underlying stream - //assert.equal(stream.encoding, 'utf8'); - }); - }); - - describe('default arguments', function() { - var stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'); - - after(function(done) { - remove(__dirname + '/test-date-rolling-file-stream-2', done); - }); - - it('should have pattern of .yyyy-MM-dd', function() { - stream.pattern.should.eql('.yyyy-MM-dd'); - }); - }); - - describe('with stream arguments', function() { - var stream = new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-3', - 'yyyy-MM-dd', - { mode: parseInt('0666', 8) } - ); - - - after(function(done) { - remove(__dirname + '/test-date-rolling-file-stream-3', done); - }); - - it('should pass them to the underlying stream', function() { - stream.theStream.mode.should.eql(parseInt('0666', 8)); - }); - }); - - describe('with stream arguments but no pattern', function() { - var stream = new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-4', - { mode: parseInt('0666', 8) } - ); - - after(function(done) { - remove(__dirname + '/test-date-rolling-file-stream-4', done); - }); - - it('should pass them to the underlying stream', function() { - stream.theStream.mode.should.eql(parseInt('0666', 8)); - }); - - it('should use default pattern', function() { - stream.pattern.should.eql('.yyyy-MM-dd'); - }); - }); - - describe('with a pattern of .yyyy-MM-dd', function() { - - var stream; - - before(function(done) { - stream = new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', - null, - now - ); - stream.write("First message\n", 'utf8', done); - }); - - after(function(done) { - remove(__dirname + '/test-date-rolling-file-stream-5', done); - }); - - it('should create a file with the base name', function(done) { - fs.readFile(__dirname + '/test-date-rolling-file-stream-5', 'utf8', function(err, contents) { - contents.should.eql("First message\n"); - done(err); - }); - }); - - describe('when the day changes', function() { - - before(function(done) { - testTime = new Date(2012, 8, 13, 0, 10, 12); - stream.write("Second message\n", 'utf8', done); - }); - - after(function(done) { - remove(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', done); - }); - - describe('the number of files', function() { - var files = []; - - before(function(done) { - fs.readdir(__dirname, function(err, list) { - files = list; - done(err); - }); - }); - - it('should be two', function() { - files.filter( - function(file) { - return file.indexOf('test-date-rolling-file-stream-5') > -1; - } - ).should.have.length(2); - }); - }); - - describe('the file without a date', function() { - it('should contain the second message', function(done) { - fs.readFile( - __dirname + '/test-date-rolling-file-stream-5', 'utf8', - function(err, contents) { - contents.should.eql("Second message\n"); - done(err); - } - ); - }); - }); - - describe('the file with the date', function() { - it('should contain the first message', function(done) { - fs.readFile( - __dirname + 
'/test-date-rolling-file-stream-5.2012-09-12', 'utf8', - function(err, contents) { - contents.should.eql("First message\n"); - done(err); - } - ); - }); - }); - }); - }); - - describe('with alwaysIncludePattern', function() { - var stream; - - before(function(done) { - testTime = new Date(2012, 8, 12, 0, 10, 12); - remove( - __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', - function() { - stream = new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-pattern', - '.yyyy-MM-dd', - { alwaysIncludePattern: true }, - now - ); - stream.write("First message\n", 'utf8', done); - } - ); - }); - - after(function(done) { - remove(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', done); - }); - - it('should create a file with the pattern set', function(done) { - fs.readFile( - __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', 'utf8', - function(err, contents) { - contents.should.eql("First message\n"); - done(err); - } - ); - }); - - describe('when the day changes', function() { - before(function(done) { - testTime = new Date(2012, 8, 13, 0, 10, 12); - stream.write("Second message\n", 'utf8', done); - }); - - after(function(done) { - remove(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13', done); - }); - - - describe('the number of files', function() { - it('should be two', function(done) { - fs.readdir(__dirname, function(err, files) { - files.filter( - function(file) { - return file.indexOf('test-date-rolling-file-stream-pattern') > -1; - } - ).should.have.length(2); - done(err); - }); - }); - }); - - describe('the file with the later date', function() { - it('should contain the second message', function(done) { - fs.readFile( - __dirname + '/test-date-rolling-file-stream-pattern.2012-09-13', 'utf8', - function(err, contents) { - contents.should.eql("Second message\n"); - done(err); - } - ); - }); - }); - - describe('the file with the date', function() { - it('should contain the first message', function(done) { - fs.readFile( - __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', 'utf8', - function(err, contents) { - contents.should.eql("First message\n"); - done(err); - } - ); - }); - }); - }); - }); -}); diff --git a/test/streams/rollingFileStream-test.js b/test/streams/rollingFileStream-test.js deleted file mode 100644 index 18a20f2e..00000000 --- a/test/streams/rollingFileStream-test.js +++ /dev/null @@ -1,264 +0,0 @@ -"use strict"; -var async = require('async') -, should = require('should') -, fs = require('fs') -, semver = require('semver') -, streams -, RollingFileStream; - -if (semver.satisfies(process.version, '>=0.10.0')) { - streams = require('stream'); -} else { - streams = require('readable-stream'); -} -RollingFileStream = require('../../lib/streams').RollingFileStream; - -function remove(filename, cb) { - fs.unlink(filename, function(err) { cb(); }); -} - -function create(filename, cb) { - fs.writeFile(filename, "test file", cb); -} - -describe('RollingFileStream', function() { - - describe('arguments', function() { - var stream; - - before(function(done) { - remove(__dirname + "/test-rolling-file-stream", function() { - stream = new RollingFileStream("test-rolling-file-stream", 1024, 5); - done(); - }); - }); - - after(function(done) { - remove(__dirname + "/test-rolling-file-stream", done); - }); - - it('should take a filename, file size (bytes), no. 
backups, return Writable', function() { - stream.should.be.an.instanceOf(streams.Writable); - stream.filename.should.eql("test-rolling-file-stream"); - stream.size.should.eql(1024); - stream.backups.should.eql(5); - }); - - it('should apply default settings to the underlying stream', function() { - stream.theStream.mode.should.eql(420); - stream.theStream.flags.should.eql('a'); - //encoding isn't a property on the underlying stream - //assert.equal(stream.theStream.encoding, 'utf8'); - }); - }); - - describe('with stream arguments', function() { - it('should pass them to the underlying stream', function() { - var stream = new RollingFileStream( - 'test-rolling-file-stream', - 1024, - 5, - { mode: parseInt('0666', 8) } - ); - stream.theStream.mode.should.eql(parseInt('0666', 8)); - }); - - after(function(done) { - remove(__dirname + '/test-rolling-file-stream', done); - }); - }); - - describe('without size', function() { - it('should throw an error', function() { - (function() { - new RollingFileStream(__dirname + "/test-rolling-file-stream"); - }).should.throw(); - }); - }); - - describe('without number of backups', function() { - it('should default to 1 backup', function() { - var stream = new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024); - stream.backups.should.eql(1); - }); - - after(function(done) { - remove(__dirname + "/test-rolling-file-stream", done); - }); - }); - - describe('writing less than the file size', function() { - - before(function(done) { - remove(__dirname + "/test-rolling-file-stream-write-less", function() { - var stream = new RollingFileStream( - __dirname + "/test-rolling-file-stream-write-less", - 100 - ); - stream.write("cheese", "utf8", function() { - stream.end(done); - }); - }); - }); - - after(function(done) { - remove(__dirname + "/test-rolling-file-stream-write-less", done); - }); - - it('should write to the file', function(done) { - fs.readFile( - __dirname + "/test-rolling-file-stream-write-less", "utf8", - function(err, contents) { - contents.should.eql("cheese"); - done(err); - } - ); - }); - - it('should write one file', function(done) { - fs.readdir(__dirname, function(err, files) { - files.filter( - function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; } - ).should.have.length(1); - done(err); - }); - }); - }); - - describe('writing more than the file size', function() { - before(function(done) { - async.forEach( - [ - __dirname + "/test-rolling-file-stream-write-more", - __dirname + "/test-rolling-file-stream-write-more.1" - ], - remove, - function() { - var stream = new RollingFileStream( - __dirname + "/test-rolling-file-stream-write-more", - 45 - ); - async.forEachSeries( - [0, 1, 2, 3, 4, 5, 6], - function(i, cb) { - stream.write(i +".cheese\n", "utf8", cb); - }, - function() { - stream.end(done); - } - ); - } - ); - }); - - after(function(done) { - async.forEach( - [ - __dirname + "/test-rolling-file-stream-write-more", - __dirname + "/test-rolling-file-stream-write-more.1" - ], - remove, - done - ); - }); - - it('should write two files' , function(done) { - fs.readdir(__dirname, function(err, files) { - files.filter( - function(file) { - return file.indexOf('test-rolling-file-stream-write-more') > -1; - } - ).should.have.length(2); - done(err); - }); - }); - - it('should write the last two log messages to the first file', function(done) { - fs.readFile( - __dirname + "/test-rolling-file-stream-write-more", "utf8", - function(err, contents) { - contents.should.eql('5.cheese\n6.cheese\n'); - 
done(err); - }); - }); - - it('should write the first five log messages to the second file', function(done) { - fs.readFile( - __dirname + '/test-rolling-file-stream-write-more.1', "utf8", - function(err, contents) { - contents.should.eql('0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n'); - done(err); - } - ); - }); - }); - - describe('when many files already exist', function() { - before(function(done) { - async.forEach( - [ - __dirname + '/test-rolling-stream-with-existing-files.11', - __dirname + '/test-rolling-stream-with-existing-files.20', - __dirname + '/test-rolling-stream-with-existing-files.-1', - __dirname + '/test-rolling-stream-with-existing-files.1.1', - __dirname + '/test-rolling-stream-with-existing-files.1' - ], - remove, - function(err) { - if (err) done(err); - - async.forEach( - [ - __dirname + '/test-rolling-stream-with-existing-files.11', - __dirname + '/test-rolling-stream-with-existing-files.20', - __dirname + '/test-rolling-stream-with-existing-files.-1', - __dirname + '/test-rolling-stream-with-existing-files.1.1', - __dirname + '/test-rolling-stream-with-existing-files.1' - ], - create, - function(err) { - if (err) done(err); - - var stream = new RollingFileStream( - __dirname + "/test-rolling-stream-with-existing-files", - 45, - 5 - ); - - async.forEachSeries( - [0, 1, 2, 3, 4, 5, 6], - function(i, cb) { - stream.write(i +".cheese\n", "utf8", cb); - }, - function() { - stream.end(done); - } - ); - } - ); - } - ); - }); - - after(function(done) { - async.forEach([ - 'test-rolling-stream-with-existing-files', - 'test-rolling-stream-with-existing-files.1', - 'test-rolling-stream-with-existing-files.2', - 'test-rolling-stream-with-existing-files.11', - 'test-rolling-stream-with-existing-files.20' - ], remove, done); - }); - - it('should roll the files', function(done) { - fs.readdir(__dirname, function(err, files) { - files.should.include('test-rolling-stream-with-existing-files'); - files.should.include('test-rolling-stream-with-existing-files.1'); - files.should.include('test-rolling-stream-with-existing-files.2'); - files.should.include('test-rolling-stream-with-existing-files.11'); - files.should.include('test-rolling-stream-with-existing-files.20'); - done(err); - }); - }); - }); -}); From 80f8f5a795b06267ee661b073ca2d4f549663f9a Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 29 Aug 2013 22:18:34 +1000 Subject: [PATCH 43/53] added streams, date-format changes --- 0.7-changes | 2 ++ 1 file changed, 2 insertions(+) diff --git a/0.7-changes b/0.7-changes index 33d8aabc..430623bd 100644 --- a/0.7-changes +++ b/0.7-changes @@ -13,3 +13,5 @@ replaced my debug lib with tjholowaychuk's debug (more of a standard) options.cwd removed - filenames should always be specified in full, not relative loglevelfilter changed to accept a list of log levels it allows appenders that wrap other appenders must reference them by name +extracted streams to streamroller +extracted date_format.js to date-format From 09d43008755388d6d49d5d1143993f11f1f28e91 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 29 Aug 2013 22:23:32 +1000 Subject: [PATCH 44/53] removed async, semver, readable-stream deps --- package.json | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/package.json b/package.json index a26c7375..09acc176 100644 --- a/package.json +++ b/package.json @@ -28,14 +28,12 @@ "lib": "lib" }, "dependencies": { - "async": "0.1.15", - "semver": "~1.1.4", - "readable-stream": "~1.0.2", "debug": "~0.7.2", "streamroller": "0.0.0", "date-format": "0.0.0" }, 
"devDependencies": { + "async": "0.1.15", "sandboxed-module": "0.1.3", "mocha": "~1.12.0", "should": "~1.2.2" From 3142497e98bab1870441e6f5c7ed642df801f580 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 29 Aug 2013 22:36:05 +1000 Subject: [PATCH 45/53] bumped streamroller version --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 09acc176..cec2a668 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,7 @@ }, "dependencies": { "debug": "~0.7.2", - "streamroller": "0.0.0", + "streamroller": "0.0.1", "date-format": "0.0.0" }, "devDependencies": { From 6dabcf5ee5823aa4e42d9e087f08995405fa5aba Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 12 Sep 2013 07:23:11 +1000 Subject: [PATCH 46/53] added to-dos to changes list --- 0.7-changes | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/0.7-changes b/0.7-changes index 430623bd..d05dc6e5 100644 --- a/0.7-changes +++ b/0.7-changes @@ -1,3 +1,5 @@ +changes +======= LogEvent.categoryName -> LogEvent.category Logger is immutable (no setLevel any more) Log levels defined in configure call, nowhere else @@ -15,3 +17,22 @@ loglevelfilter changed to accept a list of log levels it allows appenders that wrap other appenders must reference them by name extracted streams to streamroller extracted date_format.js to date-format +console.log replacement has been removed. + +to-do +===== +documentation pages (gh-pages) +* configuration +* file appenders +* layouts +* optional components +* writing your own appender (use couchdb as example) +readme +* getting started +* typical config - file with max size, file with date rolling +* optional components +fix and publish the optional components +* connect +* smtp +* gelf +* hookio ? From 245b2e3b1a3108600082d810eba018f96ee2a445 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 13 Sep 2013 08:17:22 +1000 Subject: [PATCH 47/53] improved the readme --- README.md | 207 +++++++++++++++++++++++++----------------------------- 1 file changed, 97 insertions(+), 110 deletions(-) diff --git a/README.md b/README.md index 6e479d63..8b6af75e 100644 --- a/README.md +++ b/README.md @@ -1,134 +1,121 @@ # log4js-node [![Build Status](https://secure.travis-ci.org/nomiddlename/log4js-node.png?branch=master)](http://travis-ci.org/nomiddlename/log4js-node) -This is a conversion of the [log4js](http://log4js.berlios.de/index.html) -framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript. +This was a conversion of the [log4js](http://log4js.berlios.de/index.html) +framework to work with [node](http://nodejs.org). It's changed a lot since then, but there are still plenty of the original parts involved. Out of the box it supports the following features: * coloured console logging -* replacement of node's console.log functions (optional) -* file appender, with log rolling based on file size -* SMTP appender -* GELF appender -* hook.io appender -* multiprocess appender (useful when you've got worker processes) -* a logger for connect/express servers +* file appender, with log rolling based on file size or date +* multi-process logging (works fine with node's clusters) * configurable log message layout/patterns * different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.) -NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. 
Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this: - -```javascript -{ - appenders: [ - { type: "console" } - ], - replaceConsole: true -} -``` +NOTE: There have been a lot of changes in version 0.7.x, if you're upgrading from an older version, you should read [0.7-changes](http://github.com/nomiddlename/log4js-node/0.7-changes) ## installation -npm install log4js + npm install log4js ## usage Minimalist version: -```javascript -var log4js = require('log4js'); -var logger = log4js.getLogger(); -logger.debug("Some debug messages"); -``` + + var log4js = require('log4js'); + var logger = log4js.getLogger(); + logger.debug("Some debug messages"); + By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see: -```bash -[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages -``` -See example.js for a full example, but here's a snippet (also in fromreadme.js): -```javascript -var log4js = require('log4js'); -//console log is loaded by default, so you won't normally need to do this -//log4js.loadAppender('console'); -log4js.loadAppender('file'); -//log4js.addAppender(log4js.appenders.console()); -log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese'); - -var logger = log4js.getLogger('cheese'); -logger.setLevel('ERROR'); - -logger.trace('Entering cheese testing'); -logger.debug('Got cheese.'); -logger.info('Cheese is Gouda.'); -logger.warn('Cheese is quite smelly.'); -logger.error('Cheese is too ripe!'); -logger.fatal('Cheese was breeding ground for listeria.'); -``` -Output: -```bash -[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe! -[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria. -``` -The first 5 lines of the code above could also be written as: -```javascript -var log4js = require('log4js'); -log4js.configure({ - appenders: [ - { type: 'console' }, - { type: 'file', filename: 'logs/cheese.log', category: 'cheese' } - ] -}); -``` - -## configuration - -You can configure the appenders and log levels manually (as above), or provide a -configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The -configuration file location may also be specified via the environment variable -LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`). -An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`. -By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application. - -To turn off configuration file change checking, configure with: - -```javascript -var log4js = require('log4js'); -log4js.configure('my_log4js_configuration.json', {}); -``` -To specify a different period: - -```javascript -log4js.configure('file.json', { reloadSecs: 300 }); -``` -For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored. - -```javascript -log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' }); -``` -If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. 
Here is an example configuration file: -```json -#### my_log4js_configuration.json #### -{ - "appenders": [ - { - "type": "file", - "filename": "relative/path/to/log_file.log", - "maxLogSize": 20480, - "backups": 3, - "category": "relative-logger" - }, + + [2010-01-17 11:43:37.987] [DEBUG] default - Some debug messages + +See the examples directory for lots of sample setup and usage code. + +## API +Log4js exposes two public functions: `configure` and `getLogger`. If +you're writing your own appender, your code will get access to some +internal APIs, see +[writing-appenders](http://github.com/nomiddlename/log4js-node/writing-appenders.md). + +### log4js.configure(config) +Configure takes a single argument. If that argument is a string, it is +considered the path to a JSON file containing the configuration +object. If the argument is an object, it must have the following +fields: + +* `appenders` (Object) - this should be a map of named appenders to + their configuration. At least one appender must be defined. +* `categories` (Object) - this should be a map of logger categories to + their levels and configuration. The "default" logger category must + be defined, as this is used to route all log events that do not have + an explicit category defined in the config. Category objects have + two fields: + * `level` - (String) the log level for that category: "trace", + "debug", "info", "warn", "error", "fatal", "off" + * `appenders` - (Array) the list of appender names to which log + events for this category should be sent + +The default configuration for log4js, the one used if `configure` is +not called, looks like this: + { - "type": "file", - "absolute": true, - "filename": "/absolute/path/to/log_file.log", - "maxLogSize": 20480, - "backups": 10, - "category": "absolute-logger" + "appenders": { + "console": { "type": "console" } + }, + "categories": { + "default": { level: "TRACE", appenders: [ "console" ] } + } } - ] -} -``` -Documentation for most of the core appenders can be found on the [wiki](https://github.com/nomiddlename/log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples. + +Use of the default configuration can be overridden by setting the +`LOG4JS_CONFIG` environment variable to the location of a JSON +configuration file. log4js will use this file in preference to the +defaults, if `configure` is not called. An example file can be found +in `test/log4js.json`. An example config file with log rolling is in +`test/with-log-rolling.json`. + +### log4js.getLogger([category]) + +* `category` (String), optional. Category to use for log events + generated by the Logger. + +Returns a Logger instance. Unlike in previous versions, log4js +does not hold a reference to Loggers so feel free to use as many as +you like. + +### Logger + +Loggers provide the following functions: + +* `trace` +* `debug` +* `info` +* `warn` +* `error` +* `fatal` + +All can take a variable list of arguments which are used to construct +a log event. They work the same way as console.log, so you can pass a +format string with placeholders. e.g. + + logger.debug("number of widgets is %d", widgets); + + +## Appenders + +Log4js comes with file appenders included, which can be configured to +roll over based on a time or a file size. 
Other appenders are +available as separate modules: + +* [log4js-gelf](http://github.com/nomiddlename/log4js-gelf) +* [log4js-smtp](http://github.com/nomiddlename/log4js-smtp) +* [log4js-hookio](http://github.com/nomiddlename/log4js-hookio) + +There's also +[log4js-connect](http://github.com/nomiddlename/log4s-connect), for +logging http access in connect-based servers, like express. ## Documentation See the [wiki](https://github.com/nomiddlename/log4js-node/wiki). Improve the [wiki](https://github.com/nomiddlename/log4js-node/wiki), please. From 491c2709e75c56a01941cb9c9a918ebef74f3b43 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 15 Sep 2013 14:37:01 +1000 Subject: [PATCH 48/53] changed the way appenders are loaded, so that they don't need to include log4js as a direct dependency --- lib/appenders/console.js | 33 +++++---- lib/appenders/dateFile.js | 67 +++++++++-------- lib/appenders/file.js | 112 ++++++++++++++-------------- lib/appenders/logLevelFilter.js | 60 +++++++-------- lib/log4js.js | 27 ++++--- test/clusteredAppender-test.js | 6 +- test/consoleAppender-test.js | 2 +- test/dateFileAppender-test.js | 8 +- test/fileAppender-test.js | 8 +- test/log4js-test.js | 126 ++++++++++++++++++++++++++++---- test/logLevelFilter-test.js | 4 +- writing-appenders.md | 64 ++++++++++++++++ 12 files changed, 341 insertions(+), 176 deletions(-) create mode 100644 writing-appenders.md diff --git a/lib/appenders/console.js b/lib/appenders/console.js index e0e88879..e0501cc6 100644 --- a/lib/appenders/console.js +++ b/lib/appenders/console.js @@ -1,20 +1,21 @@ "use strict"; -var layouts = require('../layouts') -, consoleLog = console.log.bind(console); +var consoleLog = console.log.bind(console); -function consoleAppender (layout) { - layout = layout || layouts.colouredLayout; - return function(loggingEvent) { - consoleLog(layout(loggingEvent)); - }; -} - -function configure(config) { - var layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); +module.exports = function(layouts, levels) { + + function consoleAppender (layout) { + layout = layout || layouts.colouredLayout; + return function(loggingEvent) { + consoleLog(layout(loggingEvent)); + }; } - return consoleAppender(layout); -} -exports.configure = configure; + return function configure(config) { + var layout; + if (config.layout) { + layout = layouts.layout(config.layout.type, config.layout); + } + return consoleAppender(layout); + }; + +}; diff --git a/lib/appenders/dateFile.js b/lib/appenders/dateFile.js index b6b296de..d32226e9 100644 --- a/lib/appenders/dateFile.js +++ b/lib/appenders/dateFile.js @@ -1,6 +1,5 @@ "use strict"; var streams = require('streamroller') -, layouts = require('../layouts') , path = require('path') , os = require('os') , eol = os.EOL || '\n' @@ -13,42 +12,42 @@ process.on('exit', function() { }); }); -/** - * File appender that rolls files according to a date pattern. - * @filename base filename. - * @pattern the format that will be added to the end of filename when rolling, - * also used to check when to roll files - defaults to '.yyyy-MM-dd' - * @layout layout function for log messages - defaults to basicLayout - */ -function appender(filename, pattern, alwaysIncludePattern, layout) { - layout = layout || layouts.basicLayout; +module.exports = function(layouts, levels) { + /** + * File appender that rolls files according to a date pattern. + * @filename base filename. 
+ * @pattern the format that will be added to the end of filename when rolling, + * also used to check when to roll files - defaults to '.yyyy-MM-dd' + * @layout layout function for log messages - defaults to basicLayout + */ + function appender(filename, pattern, alwaysIncludePattern, layout) { + layout = layout || layouts.basicLayout; + + var logFile = new streams.DateRollingFileStream( + filename, + pattern, + { alwaysIncludePattern: alwaysIncludePattern } + ); + openFiles.push(logFile); + + return function(logEvent) { + logFile.write(layout(logEvent) + eol, "utf8"); + }; - var logFile = new streams.DateRollingFileStream( - filename, - pattern, - { alwaysIncludePattern: alwaysIncludePattern } - ); - openFiles.push(logFile); - - return function(logEvent) { - logFile.write(layout(logEvent) + eol, "utf8"); - }; - -} + } -function configure(config, options) { - var layout; + return function configure(config) { + var layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } + if (config.layout) { + layout = layouts.layout(config.layout.type, config.layout); + } - if (!config.alwaysIncludePattern) { - config.alwaysIncludePattern = false; - } + if (!config.alwaysIncludePattern) { + config.alwaysIncludePattern = false; + } - return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout); -} + return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout); + }; -exports.appender = appender; -exports.configure = configure; +}; diff --git a/lib/appenders/file.js b/lib/appenders/file.js index b4357187..eddc06cf 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -1,6 +1,5 @@ "use strict"; -var layouts = require('../layouts') -, path = require('path') +var path = require('path') , fs = require('fs') , streams = require('streamroller') , os = require('os') @@ -14,65 +13,66 @@ process.on('exit', function() { }); }); -/** - * File Appender writing the logs to a text file. Supports rolling of logs by size. - * - * @param file file log messages will be written to - * @param layout a function that takes a logevent and returns a string - * (defaults to basicLayout). - * @param logSize - the maximum size (in bytes) for a log file, - * if not provided then logs won't be rotated. - * @param numBackups - the number of log files to keep after logSize - * has been reached (default 5) - */ -function fileAppender (file, layout, logSize, numBackups) { - var bytesWritten = 0; - file = path.normalize(file); - layout = layout || layouts.basicLayout; - numBackups = numBackups === undefined ? 5 : numBackups; - //there has to be at least one backup if logSize has been specified - numBackups = numBackups === 0 ? 1 : numBackups; +module.exports = function(layouts, levels) { - function openTheStream(file, fileSize, numFiles) { - var stream; - if (fileSize) { - stream = new streams.RollingFileStream( - file, - fileSize, - numFiles - ); - } else { - stream = fs.createWriteStream( - file, - { encoding: "utf8", - mode: parseInt('0644', 8), - flags: 'a' } - ); + /** + * File Appender writing the logs to a text file. Supports rolling of logs by size. + * + * @param file file log messages will be written to + * @param layout a function that takes a logevent and returns a string + * (defaults to basicLayout). + * @param logSize - the maximum size (in bytes) for a log file, + * if not provided then logs won't be rotated. 
+ * @param numBackups - the number of log files to keep after logSize + * has been reached (default 5) + */ + function fileAppender (file, layout, logSize, numBackups) { + var bytesWritten = 0; + file = path.normalize(file); + layout = layout || layouts.basicLayout; + numBackups = numBackups === undefined ? 5 : numBackups; + //there has to be at least one backup if logSize has been specified + numBackups = numBackups === 0 ? 1 : numBackups; + + function openTheStream(file, fileSize, numFiles) { + var stream; + if (fileSize) { + stream = new streams.RollingFileStream( + file, + fileSize, + numFiles + ); + } else { + stream = fs.createWriteStream( + file, + { encoding: "utf8", + mode: parseInt('0644', 8), + flags: 'a' } + ); + } + stream.on("error", function (err) { + console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err); + }); + return stream; } - stream.on("error", function (err) { - console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err); - }); - return stream; - } - var logFile = openTheStream(file, logSize, numBackups); + var logFile = openTheStream(file, logSize, numBackups); - // push file to the stack of open handlers - openFiles.push(logFile); + // push file to the stack of open handlers + openFiles.push(logFile); - return function(loggingEvent) { - logFile.write(layout(loggingEvent) + eol, "utf8"); - }; -} - -function configure(config) { - var layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); + return function(loggingEvent) { + logFile.write(layout(loggingEvent) + eol, "utf8"); + }; } - return fileAppender(config.filename, layout, config.maxLogSize, config.backups); -} + return function configure(config) { + var layout; + if (config.layout) { + layout = layouts.layout(config.layout.type, config.layout); + } + + return fileAppender(config.filename, layout, config.maxLogSize, config.backups); + }; -exports.appender = fileAppender; -exports.configure = configure; +}; diff --git a/lib/appenders/logLevelFilter.js b/lib/appenders/logLevelFilter.js index 2aa89cb5..e4d40e15 100644 --- a/lib/appenders/logLevelFilter.js +++ b/lib/appenders/logLevelFilter.js @@ -1,40 +1,40 @@ "use strict"; -var levels = require('../levels') -, debug = require('debug')('log4js:logLevelFilter') -, log4js = require('../log4js'); +var debug = require('debug')('log4js:logLevelFilter'); -function logLevelFilter(allowedLevels, appender) { - return function(logEvent) { - debug("Checking ", logEvent.level, " against ", allowedLevels); - if (allowedLevels.some(function(item) { return item.level === logEvent.level.level; })) { - debug("Sending ", logEvent, " to appender ", appender); - appender(logEvent); - } - }; -} +module.exports = function(layouts, levels) { -function configure(config, appenderByName) { - if (!Array.isArray(config.allow)) { - throw new Error("No allowed log levels specified."); + function logLevelFilter(allowedLevels, appender) { + return function(logEvent) { + debug("Checking ", logEvent.level, " against ", allowedLevels); + if (allowedLevels.some(function(item) { return item.level === logEvent.level.level; })) { + debug("Sending ", logEvent, " to appender ", appender); + appender(logEvent); + } + }; } - var allowedLevels = config.allow.map(function(allowed) { - var level = levels.toLevel(allowed); - if (!level) { - throw new Error("Unrecognised log level '" + allowed + "'."); + return function configure(config, appenderByName) { + if (!Array.isArray(config.allow)) { + throw new Error("No 
allowed log levels specified."); } - return level; - }); - if (allowedLevels.length === 0) { - throw new Error("No allowed log levels specified."); - } + var allowedLevels = config.allow.map(function(allowed) { + var level = levels.toLevel(allowed); + if (!level) { + throw new Error("Unrecognised log level '" + allowed + "'."); + } + return level; + }); - if (!config.appender) { - throw new Error("Missing an appender."); - } + if (allowedLevels.length === 0) { + throw new Error("No allowed log levels specified."); + } + + if (!config.appender) { + throw new Error("Missing an appender."); + } - return logLevelFilter(allowedLevels, appenderByName(config.appender)); -} + return logLevelFilter(allowedLevels, appenderByName(config.appender)); + }; -exports.configure = configure; +}; diff --git a/lib/log4js.js b/lib/log4js.js index 48b4ce02..b0bfab4b 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -213,8 +213,9 @@ function validateCategories(cats) { } function clearAppenders () { - debug("clearing appenders"); + debug("clearing appenders and appender makers"); appenders = {}; + appenderMakers = {}; } function appenderByName(name) { @@ -230,7 +231,6 @@ function configureAppenders(appenderMap) { Object.keys(appenderMap).forEach(function(appenderName) { var appender, appenderConfig = appenderMap[appenderName]; loadAppender(appenderConfig.type); - appenderConfig.makers = appenderMakers; try { appenders[appenderName] = appenderMakers[appenderConfig.type]( appenderConfig, @@ -247,21 +247,26 @@ function configureAppenders(appenderMap) { function loadAppender(appender) { var appenderModule; - try { - appenderModule = require('./appenders/' + appender); - } catch (e) { + + if (!appenderMakers[appender]) { + debug("Loading appender ", appender); try { - appenderModule = require(appender); - } catch (err) { - throw new Error("Could not load appender of type '" + appender + "'."); + appenderModule = require('./appenders/' + appender); + } catch (e) { + try { + debug("Appender ", appender, " is not a core log4js appender."); + appenderModule = require(appender); + } catch (err) { + throw new Error("Could not load appender of type '" + appender + "'."); + } } - } - appenderMakers[appender] = appenderModule.configure.bind(appenderModule); + appenderMakers[appender] = appenderModule(layouts, levels); + } } module.exports = { getLogger: getLogger, - configure: configure, + configure: configure }; //set ourselves up diff --git a/test/clusteredAppender-test.js b/test/clusteredAppender-test.js index 511a07dd..46e4d626 100755 --- a/test/clusteredAppender-test.js +++ b/test/clusteredAppender-test.js @@ -30,12 +30,12 @@ describe('log4js in a cluster', function() { cb(worker); } }, - './appenders/console': { - configure: function() { + './appenders/console': function() { + return function() { return function(event) { events.push(event); }; - } + }; } } } diff --git a/test/consoleAppender-test.js b/test/consoleAppender-test.js index c537326b..6f6a020f 100644 --- a/test/consoleAppender-test.js +++ b/test/consoleAppender-test.js @@ -17,7 +17,7 @@ describe('../lib/appenders/console', function() { } } ) - , appender = appenderModule.configure( + , appender = appenderModule(require('../lib/layouts'))( { layout: { type: "messagePassThrough" } } ); diff --git a/test/dateFileAppender-test.js b/test/dateFileAppender-test.js index 38b3d00e..33028cf7 100644 --- a/test/dateFileAppender-test.js +++ b/test/dateFileAppender-test.js @@ -17,7 +17,7 @@ describe('../lib/appenders/dateFile', function() { var files = [], initialListeners; 
before(function() { - var dateFileAppender = require('../lib/appenders/dateFile'), + var dateFileAppender = require('../lib/appenders/dateFile')({ basicLayout: function() {} }), count = 5, logfile; @@ -25,7 +25,7 @@ describe('../lib/appenders/dateFile', function() { while (count--) { logfile = path.join(__dirname, 'datefa-default-test' + count + '.log'); - dateFileAppender.configure({ + dateFileAppender({ filename: logfile }); files.push(logfile); @@ -69,10 +69,10 @@ describe('../lib/appenders/dateFile', function() { } } } - ); + )({ basicLayout: function() {} }); for (var i=0; i < 5; i += 1) { - dateFileAppender.configure({ + dateFileAppender({ filename: 'test' + i }); } diff --git a/test/fileAppender-test.js b/test/fileAppender-test.js index 20401f13..19538473 100644 --- a/test/fileAppender-test.js +++ b/test/fileAppender-test.js @@ -75,9 +75,9 @@ describe('log4js fileAppender', function() { } } } - ); + )(require('../lib/layouts')); for (var i=0; i < 5; i += 1) { - fileAppender.appender('test' + i, null, 100); + fileAppender({ filename: 'test' + i, maxLogSize: 100 }); } openedFiles.should.not.be.empty; exitListener(); @@ -317,8 +317,8 @@ describe('log4js fileAppender', function() { } } } - ); - fileAppender.configure({ + )(require('../lib/layouts')); + fileAppender({ filename: 'test1.log', maxLogSize: 100 }); diff --git a/test/log4js-test.js b/test/log4js-test.js index d4571f68..196f9af6 100644 --- a/test/log4js-test.js +++ b/test/log4js-test.js @@ -178,10 +178,10 @@ describe('../lib/log4js', function() { '../lib/log4js', { requires: { - 'cheese': { - configure: function() { + 'cheese': function() { + return function() { return function(evt) { events.push(evt); }; - } + }; } } } @@ -201,6 +201,102 @@ describe('../lib/log4js', function() { }); + it('should only load third-party appenders once', function() { + var moduleCalled = 0 + , log4js_sandbox = sandbox.require( + '../lib/log4js', + { + requires: { + 'cheese': function() { + moduleCalled += 1; + return function() { + return function() {}; + }; + } + } + } + ); + log4js_sandbox.configure({ + appenders: { + "thing1": { type: "cheese" }, + "thing2": { type: "cheese" } + }, + categories: { + default: { level: "DEBUG", appenders: [ "thing1", "thing2" ] } + } + }); + + moduleCalled.should.eql(1); + }); + + it('should pass layouts and levels to appender modules', function() { + var layouts + , levels + , log4js_sandbox = sandbox.require( + '../lib/log4js', + { + requires: { + 'cheese': function(arg1, arg2) { + layouts = arg1; + levels = arg2; + return function() { + return function() {}; + }; + } + } + } + ); + log4js_sandbox.configure({ + appenders: { + "thing": { type: "cheese" } + }, + categories: { + "default": { level: "debug", appenders: [ "thing" ] } + } + }); + + layouts.should.have.property("basicLayout"); + levels.should.have.property("toLevel"); + }); + + it('should pass config and appenderByName to appender makers', function() { + var otherAppender = function() { /* I do nothing */ } + , config + , other + , log4js_sandbox = sandbox.require( + '../lib/log4js', + { + requires: { + 'other': function() { + return function() { + return otherAppender; + }; + }, + 'cheese': function() { + return function(arg1, arg2) { + config = arg1; + other = arg2("other"); + return function() {}; + }; + } + } + } + ); + log4js_sandbox.configure({ + appenders: { + "other": { type: "other" }, + "thing": { type: "cheese", something: "something" } + }, + categories: { + default: { level: "debug", appenders: [ "other", "thing" ] } + } + }); + + 
other.should.equal(otherAppender); + config.should.have.property("something", "something"); + + }); + it('should complain about unknown appenders', function() { (function() { log4js.configure({ @@ -221,10 +317,10 @@ describe('../lib/log4js', function() { '../lib/log4js', { requires: { - 'cheese': { - configure: function() { + 'cheese': function() { + return function() { return function(event) { events.push(event); }; - } + }; } } } @@ -244,10 +340,10 @@ describe('../lib/log4js', function() { '../lib/log4js', { requires: { - 'cheese': { - configure: function() { + 'cheese': function() { + return function() { return function(event) { events.push(event); }; - } + }; } } } @@ -273,10 +369,10 @@ describe('../lib/log4js', function() { '../lib/log4js', { requires: { - 'cheese': { - configure: function() { + 'cheese': function() { + return function() { return function(event) { events.push(event); }; - } + }; } } } @@ -301,10 +397,10 @@ describe('../lib/log4js', function() { '../lib/log4js', { requires: { - './appenders/console': { - configure: function() { + './appenders/console': function() { + return function() { return function(event) { events.push(event); }; - } + }; } } } diff --git a/test/logLevelFilter-test.js b/test/logLevelFilter-test.js index cbc64abc..3cabc4b6 100644 --- a/test/logLevelFilter-test.js +++ b/test/logLevelFilter-test.js @@ -11,8 +11,8 @@ describe('log level filter', function() { var log4js_sandboxed = sandbox.require( '../lib/log4js', { requires: - { './appenders/console': - { configure: function() { return function(evt) { events.push(evt); }; } } + { './appenders/console': function() { + return function() { return function(evt) { events.push(evt); }; }; } } } ); diff --git a/writing-appenders.md b/writing-appenders.md new file mode 100644 index 00000000..7311f17d --- /dev/null +++ b/writing-appenders.md @@ -0,0 +1,64 @@ +Writing Appenders For log4js +============================ + +Loading appenders +----------------- +log4js supports loading appender modules from outside its own code. The [log4js-gelf](http://github.com/nomiddlename/log4js-gelf), [log4js-smtp](http://github.com/nomiddlename/log4js-smtp), and [log4js-hookio](http://github.com/nomiddlename/log4js-hookio) appenders are examples of this. In the configuration for an appender, log4js will first attempt to `require` the module from `./lib/appenders/ + type` within log4js - if that fails, it will `require` just using the type. e.g. + + log4js.configure({ + appenders: { + "custom": { type: "log4js-gelf", hostname: "blah", port: 1234 } + }, + categories: { + "default": { level: "debug", appenders: ["custom"] } + } + }); + +log4js will first attempt to `require('./appenders/' + log4js-gelf)`, this will fail. It will then attempt `require('log4js-gelf')`, which (assuming you have previously run `npm install log4js-gelf`) will pick up the gelf appender. + +Writing your own custom appender +-------------------------------- +This is easiest to explain with an example. Let's assume you want to write a [CouchDB](http://couchdb.apache.org) appender. CouchDB is a document database that you talk to via HTTP and JSON. 
Our log4js configuration is going to look something like this: + + log4js.configure({ + appenders: { + "couch": { + type: "log4js-couchdb", + url: "http://mycouchhost:5984", + db: "logs", + layout: { + type: "messagePassThrough" + } + } + }, + categories: { + "default": { level: "debug", appenders: ["couch"] } + } + }); + +When processing this configuration, the first thing log4js will do is `require('log4js-couchdb')`. It expects this module to return a function that accepts two arguments + + module.exports = function(layouts, levels) { + ... + }; + +log4js will then call that function, passing in the `layouts` and `levels` sub-modules in case your appender might need to use them. Layouts contains functions which will format a log event as a string in various different ways. Levels contains the definitions of the log levels used by log4js - you might need this for mapping log4js levels to external definitions (the GELF appender does this). These are passed in so that appenders do not need to include a hard dependency on log4js (see below), and so that log4js does not need to expose these modules to the public API. The module function will only be called once per call to `log4js.configure`, even if there are multiple appenders of that type defined. + +The module function should return another function, a configuration function, which will be called for each appender of that type defined in the config. That function should return an appender instance. For our CouchDB example, the calling process is roughly like this: + + couchDbModule = require('log4js-couchdb'); + appenderMaker = couchDbModule(layouts, levels); + appender = appenderMaker({ + type: "log4js-couchdb", + url: "http://mycouchhost:5984", + db: "logs", + layout: { + type: "messagePassThrough" + } + }, appenderByName) + +Note that in addition to our couchdb appender config, the appenderMaker function gets an extra argument: `appenderByName`, a function which returns an appender when passed its name. This is used by appenders that wrap other appenders. The `logLevelFilter` is an example of this use. + +The `layout` portion of the config can be passed directly to `layouts.layout(config.layout)` to generate a layout function. + +The appender function returned after processing your config should just take one argument: a log event. This function will be called for every log event that should be handled by your appender. In our case, with the config above, every log event of DEBUG level and above will be sent to our appender. From 5c13469bf64b9f72d74895f31feeef8653647f70 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 15 Sep 2013 14:46:47 +1000 Subject: [PATCH 49/53] added section on peer dependencies --- writing-appenders.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/writing-appenders.md b/writing-appenders.md index 7311f17d..0268b346 100644 --- a/writing-appenders.md +++ b/writing-appenders.md @@ -62,3 +62,20 @@ Note that in addition to our couchdb appender config, the appenderMaker function The `layout` portion of the config can be passed directly to `layouts.layout(config.layout)` to generate a layout function. The appender function returned after processing your config should just take one argument: a log event. This function will be called for every log event that should be handled by your appender. In our case, with the config above, every log event of DEBUG level and above will be sent to our appender. 
+ +Dependencies +------------ +You should declare which version of log4js your appender works with by +including a "peerDependencies" section in your package.json. e.g. + + { + "name": "my-cool-appender", + "version": "0.0.1", + ... + "peerDependencies": { + "log4js": "0.7.x" + } + } + +For more details on peer dependencies, see +[this blog post](http://blog.nodejs.org/2013/02/07/peer-dependencies/). From aad2bb1c2e1a2fa0cd5f93512387241502e04257 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 16 Sep 2013 07:59:57 +1000 Subject: [PATCH 50/53] improved test coverage --- test/layouts-test.js | 15 +++++++++++++++ test/logLevelFilter-test.js | 26 ++++++++++++++++++++++++-- 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/test/layouts-test.js b/test/layouts-test.js index bcf6b2d4..dea4fb4f 100644 --- a/test/layouts-test.js +++ b/test/layouts-test.js @@ -331,5 +331,20 @@ describe('log4js layouts', function() { assert.ok(layouts.layout("coloured")); assert.ok(layouts.layout("pattern")); }); + + it('should return falsy if a layout does not exist', function() { + assert.ok(!layouts.layout("cheese")); + }); + + it('should pass config to layouts that need it', function() { + var layout = layouts.layout( + "pattern", + { + pattern: "%m" + } + ); + + assert.equal(layout({ data: [ "blah" ] }), "blah"); + }); }); }); diff --git a/test/logLevelFilter-test.js b/test/logLevelFilter-test.js index 3cabc4b6..93760404 100644 --- a/test/logLevelFilter-test.js +++ b/test/logLevelFilter-test.js @@ -12,7 +12,10 @@ describe('log level filter', function() { '../lib/log4js', { requires: { './appenders/console': function() { - return function() { return function(evt) { events.push(evt); }; }; } + return function() { + return function(evt) { events.push(evt); }; + }; + } } } ); @@ -107,7 +110,7 @@ describe('log level filter', function() { "errors": { type: "logLevelFilter", allow: [ "cheese", "biscuits", "ERROR" ], - appender: { type: "console" } + appender: "console" } }, categories: { @@ -116,4 +119,23 @@ describe('log level filter', function() { }); }).should.throw(/Unrecognised log level 'cheese'\./); }); + + it('should complain if the list of levels is empty', function() { + (function() { + log4js.configure({ + appenders: { + "console": { type: "console" }, + "errors": { + type: "logLevelFilter", + allow: [], + appender: "console" + } + }, + categories: { + default: { level: "debug", appenders: [ "errors" ] } + } + }); + }).should.throw(/No allowed log levels specified\./); + }); + }); From d1c6ad6f39405a01d08e15bbb85c666f37403bcd Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Tue, 24 Sep 2013 07:33:33 +1000 Subject: [PATCH 51/53] extra debug for failing module load --- lib/log4js.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/log4js.js b/lib/log4js.js index b0bfab4b..5ad56252 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -257,6 +257,7 @@ function loadAppender(appender) { debug("Appender ", appender, " is not a core log4js appender."); appenderModule = require(appender); } catch (err) { + debug("Error loading appender %s: ", appender, err); throw new Error("Could not load appender of type '" + appender + "'."); } } From a1364040211ac01d3ebf8ed047b2365bc78a1877 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Tue, 24 Sep 2013 07:33:50 +1000 Subject: [PATCH 52/53] bumping the version --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index cec2a668..70b6df2e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { 
"name": "log4js", - "version": "0.6.8", + "version": "0.7.0", "description": "Port of Log4js to work with node.", "keywords": [ "logging", From a16100ba7258da80c874de3cac9c8556b950125b Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 14 Oct 2016 08:19:39 +1100 Subject: [PATCH 53/53] still broken --- .jshintrc | 6 +- examples/fromreadme.js | 11 +- lib/appenders/console.js | 13 +- lib/appenders/hipchat.js | 86 +++++----- lib/layouts.js | 13 +- lib/log4js.js | 72 ++++---- package.json | 1 - test/fileSyncAppender-test.js | 299 ++++++++++++++++++---------------- test/layouts-test.js | 67 ++++---- test/logglyAppender-test.js | 4 +- 10 files changed, 283 insertions(+), 289 deletions(-) diff --git a/.jshintrc b/.jshintrc index 7f2d0948..af9aa466 100644 --- a/.jshintrc +++ b/.jshintrc @@ -3,9 +3,9 @@ "laxcomma": true, "indent": 2, "globalstrict": true, - "maxparams": 5, + "maxparams": 6, "maxdepth": 3, - "maxstatements": 20, + "maxstatements": 25, "maxcomplexity": 5, "maxlen": 100, "globals": { @@ -14,5 +14,5 @@ "before": true, "beforeEach": true, "after": true - } + } } diff --git a/examples/fromreadme.js b/examples/fromreadme.js index 71b399ad..c1a28d05 100644 --- a/examples/fromreadme.js +++ b/examples/fromreadme.js @@ -1,15 +1,8 @@ +"use strict"; //remember to change the require to just 'log4js' if you've npm install'ed it -var log4js = require('./lib/log4js'); -//by default the console appender is loaded -//log4js.loadAppender('console'); -//you'd only need to add the console appender if you -//had previously called log4js.clearAppenders(); -//log4js.addAppender(log4js.appenders.console()); -log4js.loadAppender('file'); -log4js.addAppender(log4js.appenders.file('cheese.log'), 'cheese'); +var log4js = require('../lib/log4js'); var logger = log4js.getLogger('cheese'); -logger.setLevel('ERROR'); logger.trace('Entering cheese testing'); logger.debug('Got cheese.'); diff --git a/lib/appenders/console.js b/lib/appenders/console.js index f09bc057..09299d70 100644 --- a/lib/appenders/console.js +++ b/lib/appenders/console.js @@ -2,7 +2,7 @@ var consoleLog = console.log.bind(console); module.exports = function(layouts, levels) { - + function consoleAppender (layout) { layout = layout || layouts.colouredLayout; return function(loggingEvent) { @@ -18,13 +18,4 @@ module.exports = function(layouts, levels) { return consoleAppender(layout); }; -function configure(config) { - var layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - return consoleAppender(layout, config.timezoneOffset); -} - -exports.appender = consoleAppender; -exports.configure = configure; +}; diff --git a/lib/appenders/hipchat.js b/lib/appenders/hipchat.js index 3fd65df0..a73d5a58 100644 --- a/lib/appenders/hipchat.js +++ b/lib/appenders/hipchat.js @@ -1,11 +1,7 @@ "use strict"; var hipchat = require('hipchat-notifier'); -var layouts = require('../layouts'); - -exports.name = 'hipchat'; -exports.appender = hipchatAppender; -exports.configure = hipchatConfigure; +// var layouts = require('../layouts'); /** @invoke as @@ -29,62 +25,56 @@ exports.configure = hipchatConfigure; @invoke */ - -function hipchatNotifierResponseCallback(err, response, body){ - if(err) { - throw err; +module.exports = function(layouts, levels) { + function hipchatNotifierResponseCallback(err, response, body){ + if(err) { + throw err; + } } -} -function hipchatAppender(config) { + var notifierLevel = {}; + notifierLevel[levels.TRACE] = "info"; + notifierLevel[levels.DEBUG] = "info"; + notifierLevel[levels.WARN] = 
"warning"; + notifierLevel[levels.ERROR] = "failure"; + notifierLevel[levels.FATAL] = "failure"; - var notifier = hipchat.make(config.hipchat_room, config.hipchat_token); - - // @lint W074 This function's cyclomatic complexity is too high. (10) - return function(loggingEvent){ - - var notifierFn; + function hipchatAppender(config, appenderByName) { + var notifier = hipchat.make(config.hipchat_room, config.hipchat_token); notifier.setRoom(config.hipchat_room); notifier.setFrom(config.hipchat_from || ''); notifier.setNotify(config.hipchat_notify || false); - if(config.hipchat_host) { + if (config.hipchat_host) { notifier.setHost(config.hipchat_host); } - switch (loggingEvent.level.toString()) { - case "TRACE": - case "DEBUG": - notifierFn = "info"; - break; - case "WARN": - notifierFn = "warning"; - break; - case "ERROR": - case "FATAL": - notifierFn = "failure"; - break; - default: - notifierFn = "success"; - } + var layout = layouts.layout(config.layout); - // @TODO, re-work in timezoneOffset ? - var layoutMessage = config.layout(loggingEvent); + // @lint W074 This function's cyclomatic complexity is too high. (10) + return function(loggingEvent) { - // dispatch hipchat api request, do not return anything - // [overide hipchatNotifierResponseCallback] - notifier[notifierFn](layoutMessage, config.hipchat_response_callback || - hipchatNotifierResponseCallback); - }; -} + var notifierFn = notifierLevel[loggingEvent.level] || "success"; + + // @TODO, re-work in timezoneOffset ? + var layoutMessage = layout(loggingEvent); + + // dispatch hipchat api request, do not return anything + // [overide hipchatNotifierResponseCallback] + notifier[notifierFn](layoutMessage, config.hipchat_response_callback || + hipchatNotifierResponseCallback); + }; + } + + return function hipchatConfigure(config) { + var layout; -function hipchatConfigure(config) { - var layout; + if (!config.layout) { + config.layout = layouts.messagePassThroughLayout; + } - if (!config.layout) { - config.layout = layouts.messagePassThroughLayout; - } + return hipchatAppender(config, layout); + }; - return hipchatAppender(config, layout); -} +}; diff --git a/lib/layouts.js b/lib/layouts.js index 252e4c2d..354b2114 100644 --- a/lib/layouts.js +++ b/lib/layouts.js @@ -1,5 +1,6 @@ "use strict"; var dateFormat = require('date-format') +, semver = require('semver') , os = require('os') , eol = os.EOL || '\n' , util = require('util') @@ -31,7 +32,7 @@ function wrapErrorsWithInspect(items) { if (semver.satisfies(process.version, '>=6')) { return util.format(item); } else { - return util.format(item) + '\n' + item.stack; + return util.format(item) + '\n' + item.stack; } } }; } else { @@ -338,11 +339,11 @@ function patternLayout (pattern, tokens) { } module.exports = { - basicLayout: basicLayout, - messagePassThroughLayout: messagePassThroughLayout, - patternLayout: patternLayout, - colouredLayout: colouredLayout, - coloredLayout: colouredLayout, + basicLayout: basicLayout, + messagePassThroughLayout: messagePassThroughLayout, + patternLayout: patternLayout, + colouredLayout: colouredLayout, + coloredLayout: colouredLayout, layout: function(name, config) { return layoutMakers[name] && layoutMakers[name](config); } diff --git a/lib/log4js.js b/lib/log4js.js index 5ad56252..237e6818 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -100,7 +100,7 @@ cluster.on('fork', function(worker) { }); /** - * Get a logger instance. + * Get a logger instance. * @param {String} category to log to. 
* @return {Logger} instance of logger for the category * @static @@ -109,7 +109,7 @@ function getLogger (category) { debug("getLogger(", category, ")"); return new Logger( - cluster.isMaster ? dispatch : workerDispatch, + cluster.isMaster ? dispatch : workerDispatch, category || 'default' ); } @@ -126,11 +126,11 @@ function dispatch(event) { debug("event is ", event); var category = categories[event.category] || categories.default; debug( - "category.level[", - category.level, - "] <= ", - event.level, - " ? ", + "category.level[", + category.level, + "] <= ", + event.level, + " ? ", category.level.isLessThanOrEqualTo(event.level) ); @@ -152,32 +152,41 @@ function configure(configurationFileOrObject) { debug("configure(", configurationFileOrObject, ")"); debug("process.env.LOG4JS_CONFIG = ", process.env.LOG4JS_CONFIG); - var filename, config = process.env.LOG4JS_CONFIG || configurationFileOrObject; + var config = validateAndParse(configurationFileOrObject); - debug("config ", config); + debug("config = ", config); - if (!config || !(typeof config === 'string' || typeof config === 'object')) { - throw new Error("You must specify configuration as an object or a filename."); - } + configureAppenders(config.appenders); + validateCategories(config.categories); + categories = config.categories; - if (typeof config === 'string') { - debug("config is string"); - filename = config; - config = load(filename); - } +} - if (!config.appenders || !Object.keys(config.appenders).length) { - throw new Error("You must specify at least one appender."); - } +function validateAndParse(configurationFileOrObject) { + var filename, config = process.env.LOG4JS_CONFIG || configurationFileOrObject; - configureAppenders(config.appenders); + checkThatConfigIsStringOrObject(config); - validateCategories(config.categories); - categories = config.categories; + if (typeof config === 'string') { + debug("config is string"); + filename = config; + config = load(filename); + } + + if (!config.appenders || !Object.keys(config.appenders).length) { + throw new Error("You must specify at least one appender."); + } + return config; } -function validateCategories(cats) { +function checkThatConfigIsStringOrObject(config) { + if (!config || !(typeof config === 'string' || typeof config === 'object')) { + throw new Error("You must specify configuration as an object or a filename."); + } +} + +function validateCategories(cats) { if (!cats || !cats.default) { throw new Error("You must specify an appender for the default category"); } @@ -190,8 +199,8 @@ function validateCategories(cats) { category.level = levels.toLevel(inputLevel); if (!category.level) { throw new Error( - "Level '" + inputLevel + - "' is not valid for category '" + categoryName + + "Level '" + inputLevel + + "' is not valid for category '" + categoryName + "'. Acceptable values are: " + levels.levels.join(', ') + "." ); } @@ -203,8 +212,8 @@ function validateCategories(cats) { category.appenders.forEach(function(appender) { if (!appenders[appender]) { throw new Error( - "Appender '" + appender + - "' for category '" + categoryName + + "Appender '" + appender + + "' for category '" + categoryName + "' does not exist. Known appenders are: " + Object.keys(appenders).join(', ') + "." 
); } @@ -233,12 +242,12 @@ function configureAppenders(appenderMap) { loadAppender(appenderConfig.type); try { appenders[appenderName] = appenderMakers[appenderConfig.type]( - appenderConfig, + appenderConfig, appenderByName ); } catch(e) { throw new Error( - "log4js configuration problem for appender '" + appenderName + + "log4js configuration problem for appender '" + appenderName + "'. Error was " + e.stack ); } @@ -262,7 +271,7 @@ function loadAppender(appender) { } } appenderMakers[appender] = appenderModule(layouts, levels); - } + } } module.exports = { @@ -273,4 +282,3 @@ module.exports = { //set ourselves up debug("Starting configuration"); configure(defaultConfig); - diff --git a/package.json b/package.json index 5be69aab..48992875 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,6 @@ }, "scripts": { "pretest": "jshint lib/ test/", - "test": "vows" "test": "mocha --recursive" }, "directories": { diff --git a/test/fileSyncAppender-test.js b/test/fileSyncAppender-test.js index d6e2b29a..7e957b59 100755 --- a/test/fileSyncAppender-test.js +++ b/test/fileSyncAppender-test.js @@ -1,14 +1,11 @@ "use strict"; -var vows = require('vows') -, fs = require('fs') +var fs = require('fs') , path = require('path') , sandbox = require('sandboxed-module') , log4js = require('../lib/log4js') , assert = require('assert') , EOL = require('os').EOL || '\n'; -log4js.clearAppenders(); - function remove(filename) { try { fs.unlinkSync(filename); @@ -17,169 +14,183 @@ function remove(filename) { } } -vows.describe('log4js fileSyncAppender').addBatch({ - 'with default fileSyncAppender settings': { - topic: function() { - var that = this - , testFile = path.join(__dirname, '/fa-default-sync-test.log') - , logger = log4js.getLogger('default-settings'); - remove(testFile); +describe('log4js fileSyncAppender', function() { + describe('with default fileSyncAppender settings', function() { + var that = this + , testFile = path.join(__dirname, '/fa-default-sync-test.log') + , logger = log4js.getLogger('default-settings'); - log4js.clearAppenders(); - log4js.addAppender( - require('../lib/appenders/fileSync').appender(testFile), - 'default-settings' - ); + before(function() { + log4js.configure({ + appenders: { + "testFile": { type: "fileSync", filename: testFile } + }, + categories: { + "default-settings": { appender: "testFile" } + } + }); logger.info("This should be in the file."); + }); - fs.readFile(testFile, "utf8", that.callback); - }, - 'should write log messages to the file': function (err, fileContents) { - assert.include(fileContents, "This should be in the file." + EOL); - }, - 'log messages should be in the basic layout format': function(err, fileContents) { - assert.match( - fileContents, - /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - / - ); - } - }, - 'with a max file size and no backups': { - topic: function() { - var testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log') - , logger = log4js.getLogger('max-file-size') - , that = this; + after(function() { remove(testFile); - remove(testFile + '.1'); + }); + + it('should write messages to the file', function(done) { + fs.readFile(testFile, "utf8", function(err, contents) { + assert.include(contents, "This should be in the file." 
+ EOL); + done(err); + }); + }); + + it('log messages should be in the basic layout format', function(done) { + fs.readFile(testFile, "utf8", function(err, fileContents) { + assert.match( + fileContents, + /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - / + ); + done(err); + }); + }); + }); + + describe('with a max file size and no backups', function() { + var testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log') + , logger = log4js.getLogger('max-file-size'); + + before(function() { + //log file of 100 bytes maximum, no backups - log4js.clearAppenders(); - log4js.addAppender( - require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0), - 'max-file-size' - ); + log4js.configure({ + appenders: { + "testFile": { type: "fileSync", filename: testFile, maxLogSize: 100, backups: 0 } + }, + categories: { + "max-file-size": { appender: "testFile" } + } + }); logger.info("This is the first log message."); logger.info("This is an intermediate log message."); logger.info("This is the second log message."); + }); - fs.readFile(testFile, "utf8", that.callback); - }, - 'log file should only contain the second message': function (err, fileContents) { - assert.include(fileContents, "This is the second log message." + EOL); - assert.equal(fileContents.indexOf("This is the first log message."), -1); - }, - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'starting with the test file name should be two': function(err, files) { - //there will always be one backup if you've specified a max log size - var logFiles = files.filter( - function(file) { return file.indexOf('fa-maxFileSize-sync-test.log') > -1; } - ); - assert.equal(logFiles.length, 2); - } - } - }, - 'with a max file size and 2 backups': { - topic: function() { - var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-sync-test.log') - , logger = log4js.getLogger('max-file-size-backups'); + after(function() { remove(testFile); - remove(testFile+'.1'); - remove(testFile+'.2'); + remove(testFile + '.1'); + }); + + it('log file should only contain the second message', function(done) { + fs.readFile(testFile, "utf8", function (err, fileContents) { + assert.include(fileContents, "This is the second log message." 
+ EOL); + assert.equal(fileContents.indexOf("This is the first log message."), -1); + done(err); + }); + }); + + describe('the number of files', function() { + it('starting with the test file name should be two', function(done) { + fs.readdir(__dirname, function(err, files) { + //there will always be one backup if you've specified a max log size + var logFiles = files.filter( + function(file) { return file.indexOf('fa-maxFileSize-sync-test.log') > -1; } + ); + assert.equal(logFiles.length, 2); + done(err); + }); + }); + }); + }); + + describe('with a max file size and 2 backups', function() { + var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-sync-test.log') + , logger = log4js.getLogger('max-file-size-backups'); + before(function() { //log file of 50 bytes maximum, 2 backups - log4js.clearAppenders(); - log4js.addAppender( - require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2), - 'max-file-size-backups' - ); + log4js.configure({ + appenders: { + "testFile": { type: "fileSync", filename: testFile, maxLogSize: 50, backups: 2 } + }, + categories: { + 'max-file-size-backups': { appender: "testFile" } + } + }); logger.info("This is the first log message."); logger.info("This is the second log message."); logger.info("This is the third log message."); logger.info("This is the fourth log message."); - var that = this; + }); + after(function() { + remove(testFile); + remove(testFile+'.1'); + remove(testFile+'.2'); + }); + + it('should produce 3 files, named in sequence', function(done) { fs.readdir(__dirname, function(err, files) { if (files) { - that.callback(null, files.sort()); + var testFiles = files + .sort() + .filter(function(f) { + return f.contains('fa-maxFileSize-with-backups-sync-test.log'); } + ); + testFiles.length.should.equal(3); + assert.deepEqual(testFiles, [ + 'fa-maxFileSize-with-backups-sync-test.log', + 'fa-maxFileSize-with-backups-sync-test.log.1', + 'fa-maxFileSize-with-backups-sync-test.log.2' + ]); + assert.include( + fs.readFileSync(path.join(__dirname, testFiles[0]), "utf8"), + "This is the fourth log message" + ); + assert.include( + fs.readFileSync(path.join(__dirname, testFiles[1]), "utf8"), + "This is the third log message" + ); + assert.include( + fs.readFileSync(path.join(__dirname, testFiles[2]), "utf8"), + "This is the second log message" + ); + done(); } else { - that.callback(err, files); + done(err, files); } }); - }, - 'the log files': { - topic: function(files) { - var logFiles = files.filter( - function(file) { return file.indexOf('fa-maxFileSize-with-backups-sync-test.log') > -1; } - ); - return logFiles; - }, - 'should be 3': function (files) { - assert.equal(files.length, 3); - }, - 'should be named in sequence': function (files) { - assert.deepEqual(files, [ - 'fa-maxFileSize-with-backups-sync-test.log', - 'fa-maxFileSize-with-backups-sync-test.log.1', - 'fa-maxFileSize-with-backups-sync-test.log.2' - ]); - }, - 'and the contents of the first file': { - topic: function(logFiles) { - fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback); - }, - 'should be the last log message': function(contents) { - assert.include(contents, 'This is the fourth log message.'); - } - }, - 'and the contents of the second file': { - topic: function(logFiles) { - fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback); - }, - 'should be the third log message': function(contents) { - assert.include(contents, 'This is the third log message.'); - } - }, - 'and the contents of the third file': 
{ - topic: function(logFiles) { - fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback); - }, - 'should be the second log message': function(contents) { - assert.include(contents, 'This is the second log message.'); - } - } - } - } -}).addBatch({ - 'configure' : { - 'with fileSyncAppender': { - topic: function() { - var log4js = require('../lib/log4js') - , logger; - //this config defines one file appender (to ./tmp-sync-tests.log) - //and sets the log level for "tests" to WARN - log4js.configure({ - appenders: [{ - category: "tests", - type: "file", - filename: "tmp-sync-tests.log", - layout: { type: "messagePassThrough" } - }], - - levels: { tests: "WARN" } - }); - logger = log4js.getLogger('tests'); - logger.info('this should not be written to the file'); - logger.warn('this should be written to the file'); - fs.readFile('tmp-sync-tests.log', 'utf8', this.callback); - }, - 'should load appender configuration from a json file': function(err, contents) { + }); + }); + + describe('configuration', function() { + before(function() { + var log4js = require('../lib/log4js') + , logger; + //this config defines one file appender (to ./tmp-sync-tests.log) + //and sets the log level for "tests" to WARN + log4js.configure({ + appenders: [{ + category: "tests", + type: "file", + filename: "tmp-sync-tests.log", + layout: { type: "messagePassThrough" } + }], + + levels: { tests: "WARN" } + }); + logger = log4js.getLogger('tests'); + logger.info('this should not be written to the file'); + logger.warn('this should be written to the file'); + }); + + it('should load appender configuration from a json file', function(done) { + fs.readFile('tmp-sync-tests.log', 'utf8', function(err, contents) { assert.include(contents, 'this should be written to the file' + EOL); assert.equal(contents.indexOf('this should not be written to the file'), -1); - } - } - } -}).export(module); + done(err); + }); + }); + + }); +}); diff --git a/test/layouts-test.js b/test/layouts-test.js index 9d2201a3..714e6bff 100644 --- a/test/layouts-test.js +++ b/test/layouts-test.js @@ -13,7 +13,7 @@ function test(layout, event, tokens, pattern, value) { describe('log4js layouts', function() { describe('colouredLayout', function() { var layout = require('../lib/layouts').colouredLayout; - + it('should apply level colour codes to output', function() { var output = layout({ data: ["nonsense"], @@ -39,10 +39,10 @@ describe('log4js layouts', function() { }); }); - + describe('messagePassThroughLayout', function() { var layout = require('../lib/layouts').messagePassThroughLayout; - + it('should take a logevent and output only the message', function() { assert.equal(layout({ data: ["nonsense"], @@ -57,9 +57,9 @@ describe('log4js layouts', function() { it('should support the console.log format for the message', function() { assert.equal(layout({ - data: ["thing %d", 1, "cheese"], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - category: "cheese", + data: ["thing %d", 1, "cheese"], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + category: "cheese", level : { colour: "green", toString: function() { return "ERROR"; } @@ -69,9 +69,9 @@ describe('log4js layouts', function() { it('should output the first item even if it is not a string', function() { assert.equal(layout({ - data: [ { thing: 1} ], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - category: "cheese", + data: [ { thing: 1} ], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + category: "cheese", level: { colour: "green", toString: function() { return 
"ERROR"; } @@ -82,11 +82,11 @@ describe('log4js layouts', function() { it('should print the stacks of a passed error objects', function() { assert.ok(Array.isArray( layout({ - data: [ new Error() ], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - category: "cheese", + data: [ new Error() ], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + category: "cheese", level: { - colour: "green", + colour: "green", toString: function() { return "ERROR"; } } }).match( @@ -95,18 +95,18 @@ describe('log4js layouts', function() { ), 'regexp did not return a match'); }); - describe('with passed augmented errors', function() { + describe('with passed augmented errors', function() { var layoutOutput; before(function() { var e = new Error("My Unique Error Message"); e.augmented = "My Unique attribute value"; e.augObj = { at1: "at2" }; - + layoutOutput = layout({ - data: [ e ], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - category: "cheese", + data: [ e ], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + category: "cheese", level: { colour: "green", toString: function() { return "ERROR"; } @@ -129,9 +129,9 @@ describe('log4js layouts', function() { assert.ok(Array.isArray(m)); }); }); - + }); - + describe('basicLayout', function() { var layout = require('../lib/layouts').basicLayout , event = { @@ -142,7 +142,7 @@ describe('log4js layouts', function() { toString: function() { return "DEBUG"; } } }; - + it('should take a logevent and output a formatted string', function() { assert.equal(layout(event), "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test"); }); @@ -150,9 +150,10 @@ describe('log4js layouts', function() { it('should output a stacktrace, message if the event has an error attached', function() { var output , lines + , i , error = new Error("Some made-up error") , stack = error.stack.split(/\n/); - + event.data = ['this is a test', error]; output = layout(event); lines = output.split(/\n/); @@ -194,7 +195,7 @@ describe('log4js layouts', function() { ); }); }); - + describe('patternLayout', function() { var event = { data: ['this is a test'], @@ -212,13 +213,13 @@ describe('log4js layouts', function() { }; event.startTime.getTimezoneOffset = function() { return 0; }; - + it('should default to "time logLevel loggerName - message"', function() { test( - layout, - event, - tokens, - null, + layout, + event, + tokens, + null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n" ); }); @@ -316,19 +317,19 @@ describe('log4js layouts', function() { it('%x{testString} should output the string stored in tokens', function() { test(layout, event, tokens, '%x{testString}', 'testStringToken'); }); - + it('%x{testFunction} should output the result of the function stored in tokens', function() { test(layout, event, tokens, '%x{testFunction}', 'testFunctionToken'); }); - + it('%x{doesNotExist} should output the string stored in tokens', function() { test(layout, event, tokens, '%x{doesNotExist}', '%x{doesNotExist}'); }); - + it('%x{fnThatUsesLogEvent} should be able to use the logEvent', function() { test(layout, event, tokens, '%x{fnThatUsesLogEvent}', 'DEBUG'); }); - + it('%x should output the string stored in tokens', function() { test(layout, event, tokens, '%x', '%x'); }); @@ -352,7 +353,7 @@ describe('log4js layouts', function() { it('should pass config to layouts that need it', function() { var layout = layouts.layout( - "pattern", + "pattern", { pattern: "%m" } diff --git a/test/logglyAppender-test.js b/test/logglyAppender-test.js index 688e43ee..e9d391c5 100644 --- 
a/test/logglyAppender-test.js +++ b/test/logglyAppender-test.js @@ -1,5 +1,5 @@ "use strict"; -var vows = require('vows') +var should = require('should') , assert = require('assert') , log4js = require('../lib/log4js') , sandbox = require('sandboxed-module') @@ -60,7 +60,7 @@ function setupLogging(category, options) { }; } -log4js.clearAppenders(); +zlog4js.clearAppenders(); function setupTaggedLogging() { return setupLogging('loggly', {