repo_name
stringclasses 28
values | pr_number
int64 8
3.71k
| pr_title
stringlengths 3
107
| pr_description
stringlengths 0
60.1k
| author
stringlengths 4
19
| date_created
timestamp[ns, tz=UTC] | date_merged
timestamp[ns, tz=UTC] | previous_commit
stringlengths 40
40
| pr_commit
stringlengths 40
40
| query
stringlengths 5
60.1k
| filepath
stringlengths 7
167
| before_content
stringlengths 0
103M
| after_content
stringlengths 0
103M
| label
int64 -1
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
log4js-node/log4js-node | 1,251 | fix: better file validation | 1. file cannot be a directory | lamweili | 2022-05-22T08:24:06Z | 2022-05-22T08:27:13Z | f53f9750fe0d024b4dc3b0fb11c3d49b976ac039 | e378b5bda4cb3b7551067829af0f325bcf15f27f | fix: better file validation. 1. file cannot be a directory | ./.travis.yml | language: node_js
os:
- linux
- windows
sudo: false
node_js:
- "14"
- "12"
- "10"
- "8"
after_success:
- npm run codecov
| language: node_js
os:
- linux
- windows
sudo: false
node_js:
- "14"
- "12"
- "10"
- "8"
after_success:
- npm run codecov
| -1 |
log4js-node/log4js-node | 1,251 | fix: better file validation | 1. file cannot be a directory | lamweili | 2022-05-22T08:24:06Z | 2022-05-22T08:27:13Z | f53f9750fe0d024b4dc3b0fb11c3d49b976ac039 | e378b5bda4cb3b7551067829af0f325bcf15f27f | fix: better file validation. 1. file cannot be a directory | ./examples/example.js | 'use strict';
const log4js = require('../lib/log4js');
// log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' }
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' }
}
});
// a custom logger outside of the log4js/lib/appenders directory can be accessed like so
// log4js.configure({
// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } }
// ...
// });
const logger = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', { some: 'otherObject', useful_for: 'debug purposes' });
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger.error('Cheese %s is too ripe!', 'gouda');
logger.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
| 'use strict';
const log4js = require('../lib/log4js');
// log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' }
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' }
}
});
// a custom logger outside of the log4js/lib/appenders directory can be accessed like so
// log4js.configure({
// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } }
// ...
// });
const logger = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', { some: 'otherObject', useful_for: 'debug purposes' });
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger.error('Cheese %s is too ripe!', 'gouda');
logger.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
| -1 |
log4js-node/log4js-node | 1,251 | fix: better file validation | 1. file cannot be a directory | lamweili | 2022-05-22T08:24:06Z | 2022-05-22T08:27:13Z | f53f9750fe0d024b4dc3b0fb11c3d49b976ac039 | e378b5bda4cb3b7551067829af0f325bcf15f27f | fix: better file validation. 1. file cannot be a directory | ./lib/appenders/stderr.js |
function stderrAppender(layout, timezoneOffset) {
return (loggingEvent) => {
process.stderr.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
}
function configure(config, layouts) {
let layout = layouts.colouredLayout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stderrAppender(layout, config.timezoneOffset);
}
module.exports.configure = configure;
|
function stderrAppender(layout, timezoneOffset) {
return (loggingEvent) => {
process.stderr.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
}
function configure(config, layouts) {
let layout = layouts.colouredLayout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stderrAppender(layout, config.timezoneOffset);
}
module.exports.configure = configure;
| -1 |
log4js-node/log4js-node | 1,251 | fix: better file validation | 1. file cannot be a directory | lamweili | 2022-05-22T08:24:06Z | 2022-05-22T08:27:13Z | f53f9750fe0d024b4dc3b0fb11c3d49b976ac039 | e378b5bda4cb3b7551067829af0f325bcf15f27f | fix: better file validation. 1. file cannot be a directory | ./.git/hooks/applypatch-msg.sample | #!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
. git-sh-setup
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
:
| #!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
. git-sh-setup
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
:
| -1 |
log4js-node/log4js-node | 1,251 | fix: better file validation | 1. file cannot be a directory | lamweili | 2022-05-22T08:24:06Z | 2022-05-22T08:27:13Z | f53f9750fe0d024b4dc3b0fb11c3d49b976ac039 | e378b5bda4cb3b7551067829af0f325bcf15f27f | fix: better file validation. 1. file cannot be a directory | ./docs/clustering.md | # Clustering / Multi-process Logging
If you're running log4js in an application that uses [node's core cluster](https://nodejs.org/dist/latest-v8.x/docs/api/cluster.html) then log4js will transparently handle making sure the processes don't try to log at the same time. All logging is done on the master process, with the worker processes sending their log messages to the master via `process.send`. This ensures that you don't get multiple processes trying to write to the same file (or rotate the log files) at the same time.
This can cause problems in some rare circumstances, if you're experiencing weird logging problems, then use the `disableClustering: true` option in your log4js configuration to have every process behave as if it were the master process. Be careful if you're logging to files.
## I'm using PM2, but I'm not getting any logs!
To get log4js working with [PM2](http://pm2.keymetrics.io), you'll need to install the [pm2-intercom](https://www.npmjs.com/package/pm2-intercom) module.
```bash
pm2 install pm2-intercom
```
Then add the value `pm2: true` to your log4js configuration. If you're also using `node-config`, then you'll probably have renamed your `NODE_APP_INSTANCE` environment variable. If so, you'll also need to add `pm2InstanceVar: '<NEW_APP_INSTANCE_ID>'` where `<NEW_APP_INSTANCE_ID>` should be replaced with the new name you gave the instance environment variable.
```javascript
log4js.configure({
appenders: { out: { type: 'stdout'}},
categories: { default: { appenders: ['out'], level: 'info'}},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
```
## I'm using Passenger, but I'm not getting any logs!
[Passenger](https://www.phusionpassenger.com/library/) replaces the node.js core cluster module with a non-functional stub, so you won't see any output using log4js. To fix this, add `disableClustering: true` to your configuration. Again, be careful if you're logging to files.
## I'm not using clustering/pm2/passenger but I do have multiple processes that I'd like to all log to the same place
Ok, you probably want to look at the [tcp-server](tcp-server.md) and [tcp appender](tcp.md) documentation.
| # Clustering / Multi-process Logging
If you're running log4js in an application that uses [node's core cluster](https://nodejs.org/dist/latest-v8.x/docs/api/cluster.html) then log4js will transparently handle making sure the processes don't try to log at the same time. All logging is done on the master process, with the worker processes sending their log messages to the master via `process.send`. This ensures that you don't get multiple processes trying to write to the same file (or rotate the log files) at the same time.
This can cause problems in some rare circumstances, if you're experiencing weird logging problems, then use the `disableClustering: true` option in your log4js configuration to have every process behave as if it were the master process. Be careful if you're logging to files.
## I'm using PM2, but I'm not getting any logs!
To get log4js working with [PM2](http://pm2.keymetrics.io), you'll need to install the [pm2-intercom](https://www.npmjs.com/package/pm2-intercom) module.
```bash
pm2 install pm2-intercom
```
Then add the value `pm2: true` to your log4js configuration. If you're also using `node-config`, then you'll probably have renamed your `NODE_APP_INSTANCE` environment variable. If so, you'll also need to add `pm2InstanceVar: '<NEW_APP_INSTANCE_ID>'` where `<NEW_APP_INSTANCE_ID>` should be replaced with the new name you gave the instance environment variable.
```javascript
log4js.configure({
appenders: { out: { type: 'stdout'}},
categories: { default: { appenders: ['out'], level: 'info'}},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
```
## I'm using Passenger, but I'm not getting any logs!
[Passenger](https://www.phusionpassenger.com/library/) replaces the node.js core cluster module with a non-functional stub, so you won't see any output using log4js. To fix this, add `disableClustering: true` to your configuration. Again, be careful if you're logging to files.
## I'm not using clustering/pm2/passenger but I do have multiple processes that I'd like to all log to the same place
Ok, you probably want to look at the [tcp-server](tcp-server.md) and [tcp appender](tcp.md) documentation.
| -1 |
log4js-node/log4js-node | 1,251 | fix: better file validation | 1. file cannot be a directory | lamweili | 2022-05-22T08:24:06Z | 2022-05-22T08:27:13Z | f53f9750fe0d024b4dc3b0fb11c3d49b976ac039 | e378b5bda4cb3b7551067829af0f325bcf15f27f | fix: better file validation. 1. file cannot be a directory | ./test/tap/pm2-support-test.js | const { test } = require("tap");
const cluster = require("cluster");
const debug = require("debug")("log4js:pm2-test");
// PM2 runs everything as workers
// - no master in the cluster (PM2 acts as master itself)
// - we will simulate that here (avoid having to include PM2 as a dev dep)
if (cluster.isMaster) {
// create two worker forks
// PASS IN NODE_APP_INSTANCE HERE
const appEvents = {};
["0", "1"].forEach(i => {
cluster.fork({ NODE_APP_INSTANCE: i });
});
const messageHandler = (worker, msg) => {
if (worker.type || worker.topic) {
msg = worker;
}
if (msg.type === "testing") {
debug(
`Received testing message from ${msg.instance} with events ${msg.events}`
);
appEvents[msg.instance] = msg.events;
}
// we have to do the re-broadcasting that the pm2-intercom module would do.
if (msg.topic === "log4js:message") {
debug(`Received log message ${msg}`);
for (const id in cluster.workers) {
cluster.workers[id].send(msg);
}
}
};
cluster.on("message", messageHandler);
let count = 0;
cluster.on("exit", () => {
count += 1;
if (count === 2) {
// wait for any IPC messages still to come, because it seems they are slooooow.
setTimeout(() => {
test("PM2 Support", batch => {
batch.test("should not get any events when turned off", t => {
t.notOk(
appEvents["0"].filter(
e => e && e.data[0].indexOf("will not be logged") > -1
).length
);
t.notOk(
appEvents["1"].filter(
e => e && e.data[0].indexOf("will not be logged") > -1
).length
);
t.end();
});
batch.test("should get events on app instance 0", t => {
t.equal(appEvents["0"].length, 2);
t.equal(appEvents["0"][0].data[0], "this should now get logged");
t.equal(appEvents["0"][1].data[0], "this should now get logged");
t.end();
});
batch.test("should not get events on app instance 1", t => {
t.equal(appEvents["1"].length, 0);
t.end();
});
batch.end();
cluster.removeListener("message", messageHandler);
});
}, 1000);
}
});
} else {
const recorder = require("../../lib/appenders/recording");
const log4js = require("../../lib/log4js");
log4js.configure({
appenders: { out: { type: "recording" } },
categories: { default: { appenders: ["out"], level: "info" } }
});
const logger = log4js.getLogger("test");
logger.info(
"this is a test, but without enabling PM2 support it will not be logged"
);
// IPC messages can take a while to get through to start with.
setTimeout(() => {
log4js.shutdown(() => {
log4js.configure({
appenders: { out: { type: "recording" } },
categories: { default: { appenders: ["out"], level: "info" } },
pm2: true
});
const anotherLogger = log4js.getLogger("test");
setTimeout(() => {
anotherLogger.info("this should now get logged");
}, 1000);
// if we're the pm2-master we should wait for the other process to send its log messages
setTimeout(() => {
log4js.shutdown(() => {
const events = recorder.replay();
debug(
`Sending test events ${events} from ${process.env.NODE_APP_INSTANCE}`
);
process.send(
{
type: "testing",
instance: process.env.NODE_APP_INSTANCE,
events
},
() => {
setTimeout(() => {
cluster.worker.disconnect();
}, 1000);
}
);
});
}, 3000);
});
}, 2000);
}
| const { test } = require("tap");
const cluster = require("cluster");
const debug = require("debug")("log4js:pm2-test");
// PM2 runs everything as workers
// - no master in the cluster (PM2 acts as master itself)
// - we will simulate that here (avoid having to include PM2 as a dev dep)
if (cluster.isMaster) {
// create two worker forks
// PASS IN NODE_APP_INSTANCE HERE
const appEvents = {};
["0", "1"].forEach(i => {
cluster.fork({ NODE_APP_INSTANCE: i });
});
const messageHandler = (worker, msg) => {
if (worker.type || worker.topic) {
msg = worker;
}
if (msg.type === "testing") {
debug(
`Received testing message from ${msg.instance} with events ${msg.events}`
);
appEvents[msg.instance] = msg.events;
}
// we have to do the re-broadcasting that the pm2-intercom module would do.
if (msg.topic === "log4js:message") {
debug(`Received log message ${msg}`);
for (const id in cluster.workers) {
cluster.workers[id].send(msg);
}
}
};
cluster.on("message", messageHandler);
let count = 0;
cluster.on("exit", () => {
count += 1;
if (count === 2) {
// wait for any IPC messages still to come, because it seems they are slooooow.
setTimeout(() => {
test("PM2 Support", batch => {
batch.test("should not get any events when turned off", t => {
t.notOk(
appEvents["0"].filter(
e => e && e.data[0].indexOf("will not be logged") > -1
).length
);
t.notOk(
appEvents["1"].filter(
e => e && e.data[0].indexOf("will not be logged") > -1
).length
);
t.end();
});
batch.test("should get events on app instance 0", t => {
t.equal(appEvents["0"].length, 2);
t.equal(appEvents["0"][0].data[0], "this should now get logged");
t.equal(appEvents["0"][1].data[0], "this should now get logged");
t.end();
});
batch.test("should not get events on app instance 1", t => {
t.equal(appEvents["1"].length, 0);
t.end();
});
batch.end();
cluster.removeListener("message", messageHandler);
});
}, 1000);
}
});
} else {
const recorder = require("../../lib/appenders/recording");
const log4js = require("../../lib/log4js");
log4js.configure({
appenders: { out: { type: "recording" } },
categories: { default: { appenders: ["out"], level: "info" } }
});
const logger = log4js.getLogger("test");
logger.info(
"this is a test, but without enabling PM2 support it will not be logged"
);
// IPC messages can take a while to get through to start with.
setTimeout(() => {
log4js.shutdown(() => {
log4js.configure({
appenders: { out: { type: "recording" } },
categories: { default: { appenders: ["out"], level: "info" } },
pm2: true
});
const anotherLogger = log4js.getLogger("test");
setTimeout(() => {
anotherLogger.info("this should now get logged");
}, 1000);
// if we're the pm2-master we should wait for the other process to send its log messages
setTimeout(() => {
log4js.shutdown(() => {
const events = recorder.replay();
debug(
`Sending test events ${events} from ${process.env.NODE_APP_INSTANCE}`
);
process.send(
{
type: "testing",
instance: process.env.NODE_APP_INSTANCE,
events
},
() => {
setTimeout(() => {
cluster.worker.disconnect();
}, 1000);
}
);
});
}, 3000);
});
}, 2000);
}
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./lib/layouts.js | const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');
const url = require('url');
const debug = require('debug')('log4js:layouts');
const styles = {
// styles
bold: [1, 22],
italic: [3, 23],
underline: [4, 24],
inverse: [7, 27],
// grayscale
white: [37, 39],
grey: [90, 39],
black: [90, 39],
// colors
blue: [34, 39],
cyan: [36, 39],
green: [32, 39],
magenta: [35, 39],
red: [91, 39],
yellow: [33, 39]
};
function colorizeStart(style) {
return style ? `\x1B[${styles[style][0]}m` : '';
}
function colorizeEnd(style) {
return style ? `\x1B[${styles[style][1]}m` : '';
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize(str, style) {
return colorizeStart(style) + str + colorizeEnd(style);
}
function timestampLevelAndCategory(loggingEvent, colour) {
return colorize(
util.format(
'[%s] [%s] %s - ',
dateFormat.asString(loggingEvent.startTime),
loggingEvent.level.toString(),
loggingEvent.categoryName
),
colour
);
}
/**
* BasicLayout is a simple layout for storing the logs. The logs are stored
* in following format:
* <pre>
* [startTime] [logLevel] categoryName - message\n
* </pre>
*
* @author Stephan Strittmatter
*/
function basicLayout(loggingEvent) {
return timestampLevelAndCategory(loggingEvent) + util.format(...loggingEvent.data);
}
/**
* colouredLayout - taken from masylum's fork.
* same as basicLayout, but with colours.
*/
function colouredLayout(loggingEvent) {
return timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) + util.format(...loggingEvent.data);
}
function messagePassThroughLayout(loggingEvent) {
return util.format(...loggingEvent.data);
}
function dummyLayout(loggingEvent) {
return loggingEvent.data[0];
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* both padding and truncation can be negative.
* Negative truncation = trunc from end of string
* Positive truncation = trunc from start of string
* Negative padding = pad right
* Positive padding = pad left
*
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
* - %d date in constious formats
* - %% %
* - %n newline
* - %z pid
* - %f filename
* - %l line number
* - %o column postion
* - %s call stack
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* - %X{<tokenname>} add dynamic tokens to your log. Tokens are specified in logger context
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { 'pid' : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @return {Function}
* @param pattern
* @param tokens
* @param timezoneOffset
*
* @authors ['Stephan Strittmatter', 'Jan Schmidle']
*/
function patternLayout(pattern, tokens) {
const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n';
const regex = /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflos%])(\{([^}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
function categoryName(loggingEvent, specifier) {
let loggerName = loggingEvent.categoryName;
if (specifier) {
const precision = parseInt(specifier, 10);
const loggerNameBits = loggerName.split('.');
if (precision < loggerNameBits.length) {
loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join('.');
}
}
return loggerName;
}
function formatAsDate(loggingEvent, specifier) {
let format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
switch (format) {
case 'ISO8601':
case 'ISO8601_FORMAT':
format = dateFormat.ISO8601_FORMAT;
break;
case 'ISO8601_WITH_TZ_OFFSET':
case 'ISO8601_WITH_TZ_OFFSET_FORMAT':
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
break;
case 'ABSOLUTE':
process.emitWarning(
"Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. " +
"Please use %d{ABSOLUTETIME} instead.",
"DeprecationWarning", "log4js-node-DEP0003"
);
debug("[log4js-node-DEP0003]",
"DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.");
// falls through
case 'ABSOLUTETIME':
case 'ABSOLUTETIME_FORMAT':
format = dateFormat.ABSOLUTETIME_FORMAT;
break;
case 'DATE':
process.emitWarning(
"Pattern %d{DATE} is deprecated due to the confusion it causes when used. " +
"Please use %d{DATETIME} instead.",
"DeprecationWarning", "log4js-node-DEP0004"
);
debug("[log4js-node-DEP0004]",
"DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.");
// falls through
case 'DATETIME':
case 'DATETIME_FORMAT':
format = dateFormat.DATETIME_FORMAT;
break;
// no default
}
}
// Format the date
return dateFormat.asString(format, loggingEvent.startTime);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return util.format(...loggingEvent.data);
}
function endOfLine() {
return os.EOL;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime);
}
function startColour(loggingEvent) {
return colorizeStart(loggingEvent.level.colour);
}
function endColour(loggingEvent) {
return colorizeEnd(loggingEvent.level.colour);
}
function percent() {
return '%';
}
function pid(loggingEvent) {
return loggingEvent && loggingEvent.pid ? loggingEvent.pid.toString() : process.pid.toString();
}
function clusterInfo() {
// this used to try to return the master and worker pids,
// but it would never have worked because master pid is not available to workers
// leaving this here to maintain compatibility for patterns
return pid();
}
function userDefined(loggingEvent, specifier) {
if (typeof tokens[specifier] !== 'undefined') {
return typeof tokens[specifier] === 'function' ? tokens[specifier](loggingEvent) : tokens[specifier];
}
return null;
}
function contextDefined(loggingEvent, specifier) {
const resolver = loggingEvent.context[specifier];
if (typeof resolver !== 'undefined') {
return typeof resolver === 'function' ? resolver(loggingEvent) : resolver;
}
return null;
}
function fileName(loggingEvent, specifier) {
let filename = loggingEvent.fileName || '';
// support for ESM as it uses url instead of path for file
/* istanbul ignore next: unsure how to simulate ESM for test coverage */
const convertFileURLToPath = function(filepath) {
const urlPrefix = 'file://';
if (filepath.startsWith(urlPrefix)) {
// https://nodejs.org/api/url.html#urlfileurltopathurl
if (typeof url.fileURLToPath === 'function') {
filepath = url.fileURLToPath(filepath);
}
// backward-compatible for nodejs pre-10.12.0 (without url.fileURLToPath method)
else {
// posix: file:///hello/world/foo.txt -> /hello/world/foo.txt -> /hello/world/foo.txt
// win32: file:///C:/path/foo.txt -> /C:/path/foo.txt -> \C:\path\foo.txt -> C:\path\foo.txt
// win32: file://nas/foo.txt -> //nas/foo.txt -> nas\foo.txt -> \\nas\foo.txt
filepath = path.normalize(filepath.replace(new RegExp(`^${urlPrefix}`), ''));
if (process.platform === 'win32') {
if (filepath.startsWith('\\')) {
filepath = filepath.slice(1);
} else {
filepath = path.sep + path.sep + filepath;
}
}
}
}
return filepath;
};
filename = convertFileURLToPath(filename);
if (specifier) {
const fileDepth = parseInt(specifier, 10);
const fileList = filename.split(path.sep);
if (fileList.length > fileDepth) {
filename = fileList.slice(-fileDepth).join(path.sep);
}
}
return filename;
}
function lineNumber(loggingEvent) {
return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : '';
}
function columnNumber(loggingEvent) {
return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : '';
}
function callStack(loggingEvent) {
return loggingEvent.callStack || '';
}
const replacers = {
c: categoryName,
d: formatAsDate,
h: hostname,
m: formatMessage,
n: endOfLine,
p: logLevel,
r: startTime,
'[': startColour,
']': endColour,
y: clusterInfo,
z: pid,
'%': percent,
x: userDefined,
X: contextDefined,
f: fileName,
l: lineNumber,
o: columnNumber,
s: callStack
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
let len;
if (truncation) {
len = parseInt(truncation.slice(1), 10);
// negative truncate length means truncate from end of string
return len > 0 ? toTruncate.slice(0, len) : toTruncate.slice(len);
}
return toTruncate;
}
function pad(padding, toPad) {
let len;
if (padding) {
if (padding.charAt(0) === '-') {
len = parseInt(padding.slice(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += ' ';
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = ` ${toPad}`;
}
}
}
return toPad;
}
function truncateAndPad(toTruncAndPad, truncation, padding) {
let replacement = toTruncAndPad;
replacement = truncate(truncation, replacement);
replacement = pad(padding, replacement);
return replacement;
}
return function (loggingEvent) {
let formattedString = '';
let result;
let searchString = pattern;
while ((result = regex.exec(searchString)) !== null) {
// const matchedString = result[0];
const padding = result[1];
const truncation = result[2];
const conversionCharacter = result[3];
const specifier = result[5];
const text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += text.toString();
} else {
// Create a raw replacement string based on the conversion
// character and specifier
const replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
formattedString += truncateAndPad(replacement, truncation, padding);
}
searchString = searchString.slice(result.index + result[0].length);
}
return formattedString;
};
}
const layoutMakers = {
messagePassThrough () {
return messagePassThroughLayout;
},
basic () {
return basicLayout;
},
colored () {
return colouredLayout;
},
coloured () {
return colouredLayout;
},
pattern (config) {
return patternLayout(config && config.pattern, config && config.tokens);
},
dummy () {
return dummyLayout;
}
};
module.exports = {
basicLayout,
messagePassThroughLayout,
patternLayout,
colouredLayout,
coloredLayout: colouredLayout,
dummyLayout,
addLayout (name, serializerGenerator) {
layoutMakers[name] = serializerGenerator;
},
layout (name, config) {
return layoutMakers[name] && layoutMakers[name](config);
}
};
| const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');
const url = require('url');
const debug = require('debug')('log4js:layouts');
const styles = {
// styles
bold: [1, 22],
italic: [3, 23],
underline: [4, 24],
inverse: [7, 27],
// grayscale
white: [37, 39],
grey: [90, 39],
black: [90, 39],
// colors
blue: [34, 39],
cyan: [36, 39],
green: [32, 39],
magenta: [35, 39],
red: [91, 39],
yellow: [33, 39]
};
function colorizeStart(style) {
return style ? `\x1B[${styles[style][0]}m` : '';
}
function colorizeEnd(style) {
return style ? `\x1B[${styles[style][1]}m` : '';
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize(str, style) {
return colorizeStart(style) + str + colorizeEnd(style);
}
function timestampLevelAndCategory(loggingEvent, colour) {
return colorize(
util.format(
'[%s] [%s] %s - ',
dateFormat.asString(loggingEvent.startTime),
loggingEvent.level.toString(),
loggingEvent.categoryName
),
colour
);
}
/**
* BasicLayout is a simple layout for storing the logs. The logs are stored
* in following format:
* <pre>
* [startTime] [logLevel] categoryName - message\n
* </pre>
*
* @author Stephan Strittmatter
*/
function basicLayout(loggingEvent) {
return timestampLevelAndCategory(loggingEvent) + util.format(...loggingEvent.data);
}
/**
* colouredLayout - taken from masylum's fork.
* same as basicLayout, but with colours.
*/
function colouredLayout(loggingEvent) {
return timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) + util.format(...loggingEvent.data);
}
function messagePassThroughLayout(loggingEvent) {
return util.format(...loggingEvent.data);
}
function dummyLayout(loggingEvent) {
return loggingEvent.data[0];
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* both padding and truncation can be negative.
* Negative truncation = trunc from end of string
* Positive truncation = trunc from start of string
* Negative padding = pad right
* Positive padding = pad left
*
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
* - %d date in constious formats
* - %% %
* - %n newline
* - %z pid
* - %f filename
* - %l line number
* - %o column postion
* - %s call stack
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* - %X{<tokenname>} add dynamic tokens to your log. Tokens are specified in logger context
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { 'pid' : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @return {Function}
* @param pattern
* @param tokens
* @param timezoneOffset
*
* @authors ['Stephan Strittmatter', 'Jan Schmidle']
*/
function patternLayout(pattern, tokens) {
const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n';
const regex = /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflos%])(\{([^}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
function categoryName(loggingEvent, specifier) {
let loggerName = loggingEvent.categoryName;
if (specifier) {
const precision = parseInt(specifier, 10);
const loggerNameBits = loggerName.split('.');
if (precision < loggerNameBits.length) {
loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join('.');
}
}
return loggerName;
}
function formatAsDate(loggingEvent, specifier) {
let format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
switch (format) {
case 'ISO8601':
case 'ISO8601_FORMAT':
format = dateFormat.ISO8601_FORMAT;
break;
case 'ISO8601_WITH_TZ_OFFSET':
case 'ISO8601_WITH_TZ_OFFSET_FORMAT':
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
break;
case 'ABSOLUTE':
process.emitWarning(
"Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. " +
"Please use %d{ABSOLUTETIME} instead.",
"DeprecationWarning", "log4js-node-DEP0003"
);
debug("[log4js-node-DEP0003]",
"DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.");
// falls through
case 'ABSOLUTETIME':
case 'ABSOLUTETIME_FORMAT':
format = dateFormat.ABSOLUTETIME_FORMAT;
break;
case 'DATE':
process.emitWarning(
"Pattern %d{DATE} is deprecated due to the confusion it causes when used. " +
"Please use %d{DATETIME} instead.",
"DeprecationWarning", "log4js-node-DEP0004"
);
debug("[log4js-node-DEP0004]",
"DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.");
// falls through
case 'DATETIME':
case 'DATETIME_FORMAT':
format = dateFormat.DATETIME_FORMAT;
break;
// no default
}
}
// Format the date
return dateFormat.asString(format, loggingEvent.startTime);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return util.format(...loggingEvent.data);
}
function endOfLine() {
return os.EOL;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime);
}
function startColour(loggingEvent) {
return colorizeStart(loggingEvent.level.colour);
}
function endColour(loggingEvent) {
return colorizeEnd(loggingEvent.level.colour);
}
function percent() {
return '%';
}
function pid(loggingEvent) {
return loggingEvent && loggingEvent.pid ? loggingEvent.pid.toString() : process.pid.toString();
}
function clusterInfo() {
// this used to try to return the master and worker pids,
// but it would never have worked because master pid is not available to workers
// leaving this here to maintain compatibility for patterns
return pid();
}
function userDefined(loggingEvent, specifier) {
if (typeof tokens[specifier] !== 'undefined') {
return typeof tokens[specifier] === 'function' ? tokens[specifier](loggingEvent) : tokens[specifier];
}
return null;
}
function contextDefined(loggingEvent, specifier) {
const resolver = loggingEvent.context[specifier];
if (typeof resolver !== 'undefined') {
return typeof resolver === 'function' ? resolver(loggingEvent) : resolver;
}
return null;
}
function fileName(loggingEvent, specifier) {
let filename = loggingEvent.fileName || '';
// support for ESM as it uses url instead of path for file
/* istanbul ignore next: unsure how to simulate ESM for test coverage */
const convertFileURLToPath = function (filepath) {
const urlPrefix = 'file://';
if (filepath.startsWith(urlPrefix)) {
// https://nodejs.org/api/url.html#urlfileurltopathurl
if (typeof url.fileURLToPath === 'function') {
filepath = url.fileURLToPath(filepath);
}
// backward-compatible for nodejs pre-10.12.0 (without url.fileURLToPath method)
else {
// posix: file:///hello/world/foo.txt -> /hello/world/foo.txt -> /hello/world/foo.txt
// win32: file:///C:/path/foo.txt -> /C:/path/foo.txt -> \C:\path\foo.txt -> C:\path\foo.txt
// win32: file://nas/foo.txt -> //nas/foo.txt -> nas\foo.txt -> \\nas\foo.txt
filepath = path.normalize(filepath.replace(new RegExp(`^${urlPrefix}`), ''));
if (process.platform === 'win32') {
if (filepath.startsWith('\\')) {
filepath = filepath.slice(1);
} else {
filepath = path.sep + path.sep + filepath;
}
}
}
}
return filepath;
};
filename = convertFileURLToPath(filename);
if (specifier) {
const fileDepth = parseInt(specifier, 10);
const fileList = filename.split(path.sep);
if (fileList.length > fileDepth) {
filename = fileList.slice(-fileDepth).join(path.sep);
}
}
return filename;
}
function lineNumber(loggingEvent) {
return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : '';
}
function columnNumber(loggingEvent) {
return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : '';
}
function callStack(loggingEvent) {
return loggingEvent.callStack || '';
}
const replacers = {
c: categoryName,
d: formatAsDate,
h: hostname,
m: formatMessage,
n: endOfLine,
p: logLevel,
r: startTime,
'[': startColour,
']': endColour,
y: clusterInfo,
z: pid,
'%': percent,
x: userDefined,
X: contextDefined,
f: fileName,
l: lineNumber,
o: columnNumber,
s: callStack
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
let len;
if (truncation) {
len = parseInt(truncation.slice(1), 10);
// negative truncate length means truncate from end of string
return len > 0 ? toTruncate.slice(0, len) : toTruncate.slice(len);
}
return toTruncate;
}
function pad(padding, toPad) {
let len;
if (padding) {
if (padding.charAt(0) === '-') {
len = parseInt(padding.slice(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += ' ';
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = ` ${toPad}`;
}
}
}
return toPad;
}
function truncateAndPad(toTruncAndPad, truncation, padding) {
let replacement = toTruncAndPad;
replacement = truncate(truncation, replacement);
replacement = pad(padding, replacement);
return replacement;
}
return function (loggingEvent) {
let formattedString = '';
let result;
let searchString = pattern;
while ((result = regex.exec(searchString)) !== null) {
// const matchedString = result[0];
const padding = result[1];
const truncation = result[2];
const conversionCharacter = result[3];
const specifier = result[5];
const text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += text.toString();
} else {
// Create a raw replacement string based on the conversion
// character and specifier
const replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
formattedString += truncateAndPad(replacement, truncation, padding);
}
searchString = searchString.slice(result.index + result[0].length);
}
return formattedString;
};
}
const layoutMakers = {
messagePassThrough () {
return messagePassThroughLayout;
},
basic () {
return basicLayout;
},
colored () {
return colouredLayout;
},
coloured () {
return colouredLayout;
},
pattern (config) {
return patternLayout(config && config.pattern, config && config.tokens);
},
dummy () {
return dummyLayout;
}
};
module.exports = {
basicLayout,
messagePassThroughLayout,
patternLayout,
colouredLayout,
coloredLayout: colouredLayout,
dummyLayout,
addLayout (name, serializerGenerator) {
layoutMakers[name] = serializerGenerator;
},
layout (name, config) {
return layoutMakers[name] && layoutMakers[name](config);
}
};
| 1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./lib/logger.js | /* eslint no-underscore-dangle: ["error", { "allow": ["_log"] }] */
const debug = require("debug")("log4js:logger");
const LoggingEvent = require("./LoggingEvent");
const levels = require("./levels");
const clustering = require("./clustering");
const categories = require("./categories");
const configuration = require("./configuration");
const stackReg = /at (?:(.+)\s+\()?(?:(.+?):(\d+)(?::(\d+))?|([^)]+))\)?/;
function defaultParseCallStack(data, skipIdx = 4) {
try {
const stacklines = data.stack.split("\n").slice(skipIdx);
const lineMatch = stackReg.exec(stacklines[0]);
/* istanbul ignore else: failsafe */
if (lineMatch && lineMatch.length === 6) {
return {
functionName: lineMatch[1],
fileName: lineMatch[2],
lineNumber: parseInt(lineMatch[3], 10),
columnNumber: parseInt(lineMatch[4], 10),
callStack: stacklines.join("\n")
};
} else { // eslint-disable-line no-else-return
// will never get here unless nodejs has changes to Error
console.error('log4js.logger - defaultParseCallStack error'); // eslint-disable-line no-console
}
}
catch (err) {
// will never get error unless nodejs has breaking changes to Error
console.error('log4js.logger - defaultParseCallStack error', err); // eslint-disable-line no-console
}
return null;
}
/**
* Logger to log messages.
* use {@see log4js#getLogger(String)} to get an instance.
*
* @name Logger
* @namespace Log4js
* @param name name of category to log to
* @param level - the loglevel for the category
* @param dispatch - the function which will receive the logevents
*
* @author Stephan Strittmatter
*/
class Logger {
constructor(name) {
if (!name) {
throw new Error("No category provided.");
}
this.category = name;
this.context = {};
this.parseCallStack = defaultParseCallStack;
debug(`Logger created (${this.category}, ${this.level})`);
}
get level() {
return levels.getLevel(
categories.getLevelForCategory(this.category),
levels.OFF
);
}
set level(level) {
categories.setLevelForCategory(
this.category,
levels.getLevel(level, this.level)
);
}
get useCallStack() {
return categories.getEnableCallStackForCategory(this.category);
}
set useCallStack(bool) {
categories.setEnableCallStackForCategory(this.category, bool === true);
}
log(level, ...args) {
let logLevel = levels.getLevel(level);
if (!logLevel) {
this._log(levels.WARN, 'log4js:logger.log: invalid value for log-level as first parameter given: ', level);
logLevel = levels.INFO;
}
if (this.isLevelEnabled(logLevel)) {
this._log(logLevel, args);
}
}
isLevelEnabled(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
}
_log(level, data) {
debug(`sending log data (${level}) to appenders`);
const loggingEvent = new LoggingEvent(
this.category,
level,
data,
this.context,
this.useCallStack && this.parseCallStack(new Error())
);
clustering.send(loggingEvent);
}
addContext(key, value) {
this.context[key] = value;
}
removeContext(key) {
delete this.context[key];
}
clearContext() {
this.context = {};
}
setParseCallStackFunction(parseFunction) {
this.parseCallStack = parseFunction;
}
}
function addLevelMethods(target) {
const level = levels.getLevel(target);
const levelStrLower = level.toString().toLowerCase();
const levelMethod = levelStrLower.replace(/_([a-z])/g, g =>
g[1].toUpperCase()
);
const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
Logger.prototype[`is${isLevelMethod}Enabled`] = function () {
return this.isLevelEnabled(level);
};
Logger.prototype[levelMethod] = function (...args) {
this.log(level, ...args);
};
}
levels.levels.forEach(addLevelMethods);
configuration.addListener(() => {
levels.levels.forEach(addLevelMethods);
});
module.exports = Logger;
| /* eslint no-underscore-dangle: ["error", { "allow": ["_log"] }] */
const debug = require("debug")("log4js:logger");
const LoggingEvent = require("./LoggingEvent");
const levels = require("./levels");
const clustering = require("./clustering");
const categories = require("./categories");
const configuration = require("./configuration");
const stackReg = /at (?:(.+)\s+\()?(?:(.+?):(\d+)(?::(\d+))?|([^)]+))\)?/;
function defaultParseCallStack(data, skipIdx = 4) {
try {
const stacklines = data.stack.split("\n").slice(skipIdx);
const lineMatch = stackReg.exec(stacklines[0]);
/* istanbul ignore else: failsafe */
if (lineMatch && lineMatch.length === 6) {
return {
functionName: lineMatch[1],
fileName: lineMatch[2],
lineNumber: parseInt(lineMatch[3], 10),
columnNumber: parseInt(lineMatch[4], 10),
callStack: stacklines.join("\n")
};
} else { // eslint-disable-line no-else-return
// will never get here unless nodejs has changes to Error
console.error('log4js.logger - defaultParseCallStack error'); // eslint-disable-line no-console
}
}
catch (err) {
// will never get error unless nodejs has breaking changes to Error
console.error('log4js.logger - defaultParseCallStack error', err); // eslint-disable-line no-console
}
return null;
}
/**
* Logger to log messages.
* use {@see log4js#getLogger(String)} to get an instance.
*
* @name Logger
* @namespace Log4js
* @param name name of category to log to
* @param level - the loglevel for the category
* @param dispatch - the function which will receive the logevents
*
* @author Stephan Strittmatter
*/
class Logger {
constructor(name) {
if (!name) {
throw new Error("No category provided.");
}
this.category = name;
this.context = {};
this.parseCallStack = defaultParseCallStack;
debug(`Logger created (${this.category}, ${this.level})`);
}
get level() {
return levels.getLevel(
categories.getLevelForCategory(this.category),
levels.OFF
);
}
set level(level) {
categories.setLevelForCategory(
this.category,
levels.getLevel(level, this.level)
);
}
get useCallStack() {
return categories.getEnableCallStackForCategory(this.category);
}
set useCallStack(bool) {
categories.setEnableCallStackForCategory(this.category, bool === true);
}
log(level, ...args) {
let logLevel = levels.getLevel(level);
if (!logLevel) {
this._log(levels.WARN, ['log4js:logger.log: invalid value for log-level as first parameter given:', level]);
logLevel = levels.INFO;
args = [level, ...args];
}
if (this.isLevelEnabled(logLevel)) {
this._log(logLevel, args);
}
}
isLevelEnabled(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
}
_log(level, data) {
debug(`sending log data (${level}) to appenders`);
const loggingEvent = new LoggingEvent(
this.category,
level,
data,
this.context,
this.useCallStack && this.parseCallStack(new Error())
);
clustering.send(loggingEvent);
}
addContext(key, value) {
this.context[key] = value;
}
removeContext(key) {
delete this.context[key];
}
clearContext() {
this.context = {};
}
setParseCallStackFunction(parseFunction) {
this.parseCallStack = parseFunction;
}
}
function addLevelMethods(target) {
const level = levels.getLevel(target);
const levelStrLower = level.toString().toLowerCase();
const levelMethod = levelStrLower.replace(/_([a-z])/g, g =>
g[1].toUpperCase()
);
const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
Logger.prototype[`is${isLevelMethod}Enabled`] = function () {
return this.isLevelEnabled(level);
};
Logger.prototype[levelMethod] = function (...args) {
this.log(level, ...args);
};
}
levels.levels.forEach(addLevelMethods);
configuration.addListener(() => {
levels.levels.forEach(addLevelMethods);
});
module.exports = Logger;
| 1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/newLevel-test.js | const { test } = require("tap");
const log4js = require("../../lib/log4js");
const recording = require("../../lib/appenders/recording");
test("../../lib/logger", batch => {
batch.beforeEach(() => {
recording.reset();
});
batch.test("creating a new log level", t => {
log4js.configure({
levels: {
DIAG: { value: 6000, colour: "green" }
},
appenders: {
stdout: { type: "stdout" }
},
categories: {
default: { appenders: ["stdout"], level: "trace" }
}
});
const logger = log4js.getLogger();
t.test("should export new log level in levels module", assert => {
assert.ok(log4js.levels.DIAG);
assert.equal(log4js.levels.DIAG.levelStr, "DIAG");
assert.equal(log4js.levels.DIAG.level, 6000);
assert.equal(log4js.levels.DIAG.colour, "green");
assert.end();
});
t.type(
logger.diag,
"function",
"should create named function on logger prototype"
);
t.type(
logger.isDiagEnabled,
"function",
"should create isLevelEnabled function on logger prototype"
);
t.type(logger.info, "function", "should retain default levels");
t.end();
});
batch.test("creating a new log level with underscores", t => {
log4js.configure({
levels: {
NEW_LEVEL_OTHER: { value: 6000, colour: "blue" }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
const logger = log4js.getLogger();
t.test("should export new log level to levels module", assert => {
assert.ok(log4js.levels.NEW_LEVEL_OTHER);
assert.equal(log4js.levels.NEW_LEVEL_OTHER.levelStr, "NEW_LEVEL_OTHER");
assert.equal(log4js.levels.NEW_LEVEL_OTHER.level, 6000);
assert.equal(log4js.levels.NEW_LEVEL_OTHER.colour, "blue");
assert.end();
});
t.type(
logger.newLevelOther,
"function",
"should create named function on logger prototype in camel case"
);
t.type(
logger.isNewLevelOtherEnabled,
"function",
"should create named isLevelEnabled function on logger prototype in camel case"
);
t.end();
});
batch.test("creating log events containing newly created log level", t => {
log4js.configure({
levels: {
LVL1: { value: 6000, colour: "grey" },
LVL2: { value: 5000, colour: "magenta" }
},
appenders: { recorder: { type: "recording" } },
categories: {
default: { appenders: ["recorder"], level: "LVL1" }
}
});
const logger = log4js.getLogger();
logger.log(log4js.levels.getLevel("LVL1", log4js.levels.DEBUG), "Event 1");
logger.log(log4js.levels.getLevel("LVL1"), "Event 2");
logger.log("LVL1", "Event 3");
logger.lvl1("Event 4");
logger.lvl2("Event 5");
const events = recording.replay();
t.test("should show log events with new log level", assert => {
assert.equal(events[0].level.toString(), "LVL1");
assert.equal(events[0].data[0], "Event 1");
assert.equal(events[1].level.toString(), "LVL1");
assert.equal(events[1].data[0], "Event 2");
assert.equal(events[2].level.toString(), "LVL1");
assert.equal(events[2].data[0], "Event 3");
assert.equal(events[3].level.toString(), "LVL1");
assert.equal(events[3].data[0], "Event 4");
assert.end();
});
t.equal(
events.length,
4,
"should not be present if min log level is greater than newly created level"
);
t.end();
});
batch.test("creating a new log level with incorrect parameters", t => {
t.throws(() => {
log4js.configure({
levels: {
cheese: { value: "biscuits" }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level "cheese".value must have an integer value');
t.throws(() => {
log4js.configure({
levels: {
cheese: "biscuits"
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level "cheese" must be an object');
t.throws(() => {
log4js.configure({
levels: {
cheese: { thing: "biscuits" }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, "level \"cheese\" must have a 'value' property");
t.throws(() => {
log4js.configure({
levels: {
cheese: { value: 3 }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, "level \"cheese\" must have a 'colour' property");
t.throws(() => {
log4js.configure({
levels: {
cheese: { value: 3, colour: "pants" }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level "cheese".colour must be one of white, grey, black, blue, cyan, green, magenta, red, yellow');
t.throws(() => {
log4js.configure({
levels: {
"#pants": 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.throws(() => {
log4js.configure({
levels: {
"thing#pants": 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "thing#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.throws(() => {
log4js.configure({
levels: {
"1pants": 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "1pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.throws(() => {
log4js.configure({
levels: {
2: 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "2" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.throws(() => {
log4js.configure({
levels: {
"cheese!": 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "cheese!" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.end();
});
batch.test("calling log with an undefined log level", t => {
log4js.configure({
appenders: { recorder: { type: "recording" } },
categories: { default: { appenders: ["recorder"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.log("LEVEL_DOES_NEXT_EXIST", "Event 1");
logger.log(log4js.levels.getLevel("LEVEL_DOES_NEXT_EXIST"), "Event 2");
const events = recording.replay();
t.equal(events[0].level.toString(), "WARN", "should log warning");
t.equal(events[1].level.toString(), "INFO", "should fall back to INFO");
t.equal(events[2].level.toString(), "WARN", "should log warning");
t.equal(events[3].level.toString(), "INFO", "should fall back to INFO");
t.end();
});
batch.test("creating a new level with an existing level name", t => {
log4js.configure({
levels: {
info: { value: 1234, colour: "blue" }
},
appenders: { recorder: { type: "recording" } },
categories: { default: { appenders: ["recorder"], level: "all" } }
});
t.equal(
log4js.levels.INFO.level,
1234,
"should override the existing log level"
);
t.equal(
log4js.levels.INFO.colour,
"blue",
"should override the existing log level"
);
const logger = log4js.getLogger();
logger.info("test message");
const events = recording.replay();
t.equal(
events[0].level.level,
1234,
"should override the existing log level"
);
t.end();
});
batch.end();
});
| const { test } = require("tap");
const log4js = require("../../lib/log4js");
const recording = require("../../lib/appenders/recording");
test("../../lib/logger", batch => {
batch.beforeEach(() => {
recording.reset();
});
batch.test("creating a new log level", t => {
log4js.configure({
levels: {
DIAG: { value: 6000, colour: "green" }
},
appenders: {
stdout: { type: "stdout" }
},
categories: {
default: { appenders: ["stdout"], level: "trace" }
}
});
const logger = log4js.getLogger();
t.test("should export new log level in levels module", assert => {
assert.ok(log4js.levels.DIAG);
assert.equal(log4js.levels.DIAG.levelStr, "DIAG");
assert.equal(log4js.levels.DIAG.level, 6000);
assert.equal(log4js.levels.DIAG.colour, "green");
assert.end();
});
t.type(
logger.diag,
"function",
"should create named function on logger prototype"
);
t.type(
logger.isDiagEnabled,
"function",
"should create isLevelEnabled function on logger prototype"
);
t.type(logger.info, "function", "should retain default levels");
t.end();
});
batch.test("creating a new log level with underscores", t => {
log4js.configure({
levels: {
NEW_LEVEL_OTHER: { value: 6000, colour: "blue" }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
const logger = log4js.getLogger();
t.test("should export new log level to levels module", assert => {
assert.ok(log4js.levels.NEW_LEVEL_OTHER);
assert.equal(log4js.levels.NEW_LEVEL_OTHER.levelStr, "NEW_LEVEL_OTHER");
assert.equal(log4js.levels.NEW_LEVEL_OTHER.level, 6000);
assert.equal(log4js.levels.NEW_LEVEL_OTHER.colour, "blue");
assert.end();
});
t.type(
logger.newLevelOther,
"function",
"should create named function on logger prototype in camel case"
);
t.type(
logger.isNewLevelOtherEnabled,
"function",
"should create named isLevelEnabled function on logger prototype in camel case"
);
t.end();
});
batch.test("creating log events containing newly created log level", t => {
log4js.configure({
levels: {
LVL1: { value: 6000, colour: "grey" },
LVL2: { value: 5000, colour: "magenta" }
},
appenders: { recorder: { type: "recording" } },
categories: {
default: { appenders: ["recorder"], level: "LVL1" }
}
});
const logger = log4js.getLogger();
logger.log(log4js.levels.getLevel("LVL1", log4js.levels.DEBUG), "Event 1");
logger.log(log4js.levels.getLevel("LVL1"), "Event 2");
logger.log("LVL1", "Event 3");
logger.lvl1("Event 4");
logger.lvl2("Event 5");
const events = recording.replay();
t.test("should show log events with new log level", assert => {
assert.equal(events[0].level.toString(), "LVL1");
assert.equal(events[0].data[0], "Event 1");
assert.equal(events[1].level.toString(), "LVL1");
assert.equal(events[1].data[0], "Event 2");
assert.equal(events[2].level.toString(), "LVL1");
assert.equal(events[2].data[0], "Event 3");
assert.equal(events[3].level.toString(), "LVL1");
assert.equal(events[3].data[0], "Event 4");
assert.end();
});
t.equal(
events.length,
4,
"should not be present if min log level is greater than newly created level"
);
t.end();
});
batch.test("creating a new log level with incorrect parameters", t => {
t.throws(() => {
log4js.configure({
levels: {
cheese: { value: "biscuits" }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level "cheese".value must have an integer value');
t.throws(() => {
log4js.configure({
levels: {
cheese: "biscuits"
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level "cheese" must be an object');
t.throws(() => {
log4js.configure({
levels: {
cheese: { thing: "biscuits" }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, "level \"cheese\" must have a 'value' property");
t.throws(() => {
log4js.configure({
levels: {
cheese: { value: 3 }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, "level \"cheese\" must have a 'colour' property");
t.throws(() => {
log4js.configure({
levels: {
cheese: { value: 3, colour: "pants" }
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level "cheese".colour must be one of white, grey, black, blue, cyan, green, magenta, red, yellow');
t.throws(() => {
log4js.configure({
levels: {
"#pants": 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.throws(() => {
log4js.configure({
levels: {
"thing#pants": 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "thing#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.throws(() => {
log4js.configure({
levels: {
"1pants": 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "1pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.throws(() => {
log4js.configure({
levels: {
2: 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "2" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.throws(() => {
log4js.configure({
levels: {
"cheese!": 3
},
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "trace" } }
});
}, 'level name "cheese!" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
t.end();
});
batch.test("calling log with an undefined log level", t => {
log4js.configure({
appenders: { recorder: { type: "recording" } },
categories: { default: { appenders: ["recorder"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.log("LEVEL_DOES_NOT_EXIST", "Event 1");
logger.log(log4js.levels.getLevel("LEVEL_DOES_NOT_EXIST"), "Event 2");
logger.log("Event 3");
const events = recording.replay();
t.equal(events[0].level.toString(), "WARN", "should log warning");
t.equal(events[0].data[0], "log4js:logger.log: invalid value for log-level as first parameter given:");
t.equal(events[0].data[1], "LEVEL_DOES_NOT_EXIST");
t.equal(events[1].level.toString(), "INFO", "should fall back to INFO");
t.equal(events[1].data[0], "LEVEL_DOES_NOT_EXIST");
t.equal(events[1].data[1], "Event 1");
t.equal(events[2].level.toString(), "WARN", "should log warning");
t.equal(events[2].data[0], "log4js:logger.log: invalid value for log-level as first parameter given:");
t.equal(events[2].data[1], undefined);
t.equal(events[3].level.toString(), "INFO", "should fall back to INFO");
t.equal(events[3].data[0], undefined);
t.equal(events[3].data[1], "Event 2");
t.equal(events[4].level.toString(), "WARN", "should log warning");
t.equal(events[4].data[0], "log4js:logger.log: invalid value for log-level as first parameter given:");
t.equal(events[4].data[1], "Event 3");
t.equal(events[5].level.toString(), "INFO", "should fall back to INFO");
t.equal(events[5].data[0], "Event 3");
t.end();
});
batch.test("creating a new level with an existing level name", t => {
log4js.configure({
levels: {
info: { value: 1234, colour: "blue" }
},
appenders: { recorder: { type: "recording" } },
categories: { default: { appenders: ["recorder"], level: "all" } }
});
t.equal(
log4js.levels.INFO.level,
1234,
"should override the existing log level"
);
t.equal(
log4js.levels.INFO.colour,
"blue",
"should override the existing log level"
);
const logger = log4js.getLogger();
logger.info("test message");
const events = recording.replay();
t.equal(
events[0].level.level,
1234,
"should override the existing log level"
);
t.end();
});
batch.end();
});
| 1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/pm2.js | const log4js = require('../lib/log4js');
// NOTE: for PM2 support to work you'll need to install the pm2-intercom module
// `pm2 install pm2-intercom`
log4js.configure({
appenders: {
out: { type: 'file', filename: 'pm2logs.log' }
},
categories: {
default: { appenders: ['out'], level: 'info' }
},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
const logger = log4js.getLogger('app');
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info('last bubbles', process.env.INSTANCE_ID);
// give pm2 time to set everything up, before we tear it down
setTimeout(() => {
log4js.shutdown(() => {
console.error('All done, shutdown cb returned.');
});
}, 5000);
| const log4js = require('../lib/log4js');
// NOTE: for PM2 support to work you'll need to install the pm2-intercom module
// `pm2 install pm2-intercom`
log4js.configure({
appenders: {
out: { type: 'file', filename: 'pm2logs.log' }
},
categories: {
default: { appenders: ['out'], level: 'info' }
},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
const logger = log4js.getLogger('app');
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info('last bubbles', process.env.INSTANCE_ID);
// give pm2 time to set everything up, before we tear it down
setTimeout(() => {
log4js.shutdown(() => {
console.error('All done, shutdown cb returned.');
});
}, 5000);
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/memory-test.js | const log4js = require('../lib/log4js');
log4js.configure(
{
appenders: {
logs: {
type: 'file',
filename: 'memory-test.log'
},
console: {
type: 'stdout',
},
file: {
type: 'file',
filename: 'memory-usage.log',
layout: {
type: 'messagePassThrough'
}
}
},
categories: {
default: { appenders: ['console'], level: 'info' },
'memory-test': { appenders: ['logs'], level: 'info' },
'memory-usage': { appenders: ['console', 'file'], level: 'info' }
}
}
);
const logger = log4js.getLogger('memory-test');
const usage = log4js.getLogger('memory-usage');
for (let i = 0; i < 1000000; i += 1) {
if ((i % 5000) === 0) {
usage.info('%d %d', i, process.memoryUsage().rss);
}
logger.info('Doing something.');
}
log4js.shutdown(() => {});
| const log4js = require('../lib/log4js');
log4js.configure(
{
appenders: {
logs: {
type: 'file',
filename: 'memory-test.log'
},
console: {
type: 'stdout',
},
file: {
type: 'file',
filename: 'memory-usage.log',
layout: {
type: 'messagePassThrough'
}
}
},
categories: {
default: { appenders: ['console'], level: 'info' },
'memory-test': { appenders: ['logs'], level: 'info' },
'memory-usage': { appenders: ['console', 'file'], level: 'info' }
}
}
);
const logger = log4js.getLogger('memory-test');
const usage = log4js.getLogger('memory-usage');
for (let i = 0; i < 1000000; i += 1) {
if ((i % 5000) === 0) {
usage.info('%d %d', i, process.memoryUsage().rss);
}
logger.info('Doing something.');
}
log4js.shutdown(() => {});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/rabbitmq-appender.js | // Note that rabbitmq appender needs install amqplib to work.
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console'
},
file: {
type: 'dateFile',
filename: 'logs/log.txt',
pattern: 'yyyyMMdd',
alwaysIncludePattern: false
},
mq: {
type: '@log4js-node/rabbitmq',
host: '127.0.0.1',
port: 5672,
username: 'guest',
password: 'guest',
routing_key: 'logstash',
exchange: 'exchange_logs',
mq_type: 'direct',
durable: true,
layout: {
type: 'pattern',
pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m'
}
}
},
categories: {
default: { appenders: ['out'], level: 'info' },
dateFile: { appenders: ['file'], level: 'info' },
rabbitmq: { appenders: ['mq'], level: 'info' }
}
});
const log = log4js.getLogger('console');
const logRabbitmq = log4js.getLogger('rabbitmq');
function doTheLogging(x) {
log.info('Logging something %d', x);
logRabbitmq.info('Logging something %d', x);
}
for (let i = 0; i < 500; i += 1) {
doTheLogging(i);
}
| // Note that rabbitmq appender needs install amqplib to work.
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console'
},
file: {
type: 'dateFile',
filename: 'logs/log.txt',
pattern: 'yyyyMMdd',
alwaysIncludePattern: false
},
mq: {
type: '@log4js-node/rabbitmq',
host: '127.0.0.1',
port: 5672,
username: 'guest',
password: 'guest',
routing_key: 'logstash',
exchange: 'exchange_logs',
mq_type: 'direct',
durable: true,
layout: {
type: 'pattern',
pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m'
}
}
},
categories: {
default: { appenders: ['out'], level: 'info' },
dateFile: { appenders: ['file'], level: 'info' },
rabbitmq: { appenders: ['mq'], level: 'info' }
}
});
const log = log4js.getLogger('console');
const logRabbitmq = log4js.getLogger('rabbitmq');
function doTheLogging(x) {
log.info('Logging something %d', x);
logRabbitmq.info('Logging something %d', x);
}
for (let i = 0; i < 500; i += 1) {
doTheLogging(i);
}
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/passenger-test.js | const { test } = require("tap");
const sandbox = require("@log4js-node/sandboxed-module");
// passenger provides a non-functional cluster module,
// but it does not implement the event emitter functions
const passengerCluster = {
disconnect() {
return false;
},
fork() {
return false;
},
setupMaster() {
return false;
},
isWorker: true,
isMaster: false,
schedulingPolicy: false,
settings: false,
worker: false,
workers: false
};
const vcr = require("../../lib/appenders/recording");
const log4js = sandbox.require("../../lib/log4js", {
requires: {
cluster: passengerCluster,
"./appenders/recording": vcr
}
});
test("When running in Passenger", batch => {
batch.test("it should still log", t => {
log4js.configure({
appenders: {
vcr: { type: "recording" }
},
categories: {
default: { appenders: ["vcr"], level: "info" }
},
disableClustering: true
});
log4js.getLogger().info("This should still work");
const events = vcr.replay();
t.equal(events.length, 1);
t.equal(events[0].data[0], "This should still work");
t.end();
});
batch.end();
});
| const { test } = require("tap");
const sandbox = require("@log4js-node/sandboxed-module");
// passenger provides a non-functional cluster module,
// but it does not implement the event emitter functions
const passengerCluster = {
disconnect() {
return false;
},
fork() {
return false;
},
setupMaster() {
return false;
},
isWorker: true,
isMaster: false,
schedulingPolicy: false,
settings: false,
worker: false,
workers: false
};
const vcr = require("../../lib/appenders/recording");
const log4js = sandbox.require("../../lib/log4js", {
requires: {
cluster: passengerCluster,
"./appenders/recording": vcr
}
});
test("When running in Passenger", batch => {
batch.test("it should still log", t => {
log4js.configure({
appenders: {
vcr: { type: "recording" }
},
categories: {
default: { appenders: ["vcr"], level: "info" }
},
disableClustering: true
});
log4js.getLogger().info("This should still work");
const events = vcr.replay();
t.equal(events.length, 1);
t.equal(events[0].data[0], "This should still work");
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/slack-appender.js | // Note that slack appender needs slack-node package to work.
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
slack: {
type: '@log4js-node/slack',
token: 'TOKEN',
channel_id: '#CHANNEL',
username: 'USERNAME',
format: 'text',
icon_url: 'ICON_URL'
}
},
categories: {
default: { appenders: ['slack'], level: 'info' }
}
});
const logger = log4js.getLogger('slack');
logger.warn('Test Warn message');
logger.info('Test Info message');
logger.debug('Test Debug Message');
logger.trace('Test Trace Message');
logger.fatal('Test Fatal Message');
logger.error('Test Error Message');
| // Note that slack appender needs slack-node package to work.
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
slack: {
type: '@log4js-node/slack',
token: 'TOKEN',
channel_id: '#CHANNEL',
username: 'USERNAME',
format: 'text',
icon_url: 'ICON_URL'
}
},
categories: {
default: { appenders: ['slack'], level: 'info' }
}
});
const logger = log4js.getLogger('slack');
logger.warn('Test Warn message');
logger.info('Test Info message');
logger.debug('Test Debug Message');
logger.trace('Test Trace Message');
logger.fatal('Test Fatal Message');
logger.error('Test Error Message');
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/dateFileAppender-test.js | /* eslint max-classes-per-file: ["error", 3] */
const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const EOL = require("os").EOL || "\n";
const format = require("date-format");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
function removeFile(filename) {
try {
fs.unlinkSync(path.join(__dirname, filename));
} catch (e) {
// doesn't matter
}
}
test("../../lib/appenders/dateFile", batch => {
batch.test("with default settings", t => {
const testFile = path.join(__dirname, "date-appender-default.log");
log4js.configure({
appenders: { date: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["date"], level: "DEBUG" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("This should be in the file.");
t.teardown(() => {
removeFile("date-appender-default.log");
});
setTimeout(() => {
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, "This should be in the file");
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
}, 100);
});
batch.test("configure with dateFileAppender", t => {
log4js.configure({
appenders: {
date: {
type: "dateFile",
filename: "test/tap/date-file-test.log",
pattern: "-yyyy-MM-dd",
layout: { type: "messagePassThrough" }
}
},
categories: { default: { appenders: ["date"], level: "WARN" } }
});
const logger = log4js.getLogger("tests");
logger.info("this should not be written to the file");
logger.warn("this should be written to the file");
log4js.shutdown(() => {
fs.readFile(
path.join(__dirname, "date-file-test.log"),
"utf8",
(err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(
contents.indexOf("this should not be written to the file"),
-1
);
t.end();
}
);
});
t.teardown(() => {
removeFile("date-file-test.log");
});
});
batch.test("configure with options.alwaysIncludePattern", t => {
const options = {
appenders: {
date: {
category: "tests",
type: "dateFile",
filename: "test/tap/date-file-test",
pattern: "yyyy-MM-dd.log",
alwaysIncludePattern: true,
layout: {
type: "messagePassThrough"
}
}
},
categories: { default: { appenders: ["date"], level: "debug" } }
};
const thisTime = format.asString(
options.appenders.date.pattern,
new Date()
);
const testFile = `date-file-test.${thisTime}`;
const existingFile = path.join(
__dirname,
testFile
);
fs.writeFileSync(existingFile, `this is existing data${EOL}`, "utf8");
log4js.configure(options);
const logger = log4js.getLogger("tests");
logger.warn("this should be written to the file with the appended date");
t.teardown(() => {
removeFile(testFile);
});
// wait for filesystem to catch up
log4js.shutdown(() => {
fs.readFile(existingFile, "utf8", (err, contents) => {
t.match(
contents,
"this is existing data",
"should not overwrite the file on open (issue #132)"
);
t.match(
contents,
"this should be written to the file with the appended date"
);
t.end();
});
});
});
batch.test("should flush logs on shutdown", t => {
const testFile = path.join(__dirname, "date-appender-flush.log");
log4js.configure({
appenders: { test: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("1");
logger.info("2");
logger.info("3");
t.teardown(() => {
removeFile("date-appender-flush.log");
});
log4js.shutdown(() => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test("should map maxLogSize to maxSize", t => {
const fakeStreamroller = {};
class DateRollingFileStream {
constructor(filename, pattern, options) {
fakeStreamroller.filename = filename;
fakeStreamroller.pattern = pattern;
fakeStreamroller.options = options;
}
on() { } // eslint-disable-line class-methods-use-this
}
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
const dateFileAppenderModule = sandbox.require(
"../../lib/appenders/dateFile",
{
requires: { streamroller: fakeStreamroller }
}
);
dateFileAppenderModule.configure(
{
filename: "cheese.log",
pattern: "yyyy",
maxLogSize: 100
},
{ basicLayout: () => {} }
);
t.equal(fakeStreamroller.options.maxSize, 100);
t.end();
});
batch.test("handling of writer.writable", t => {
const output = [];
let writable = true;
const DateRollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
on() { // eslint-disable-line class-methods-use-this
}
get writable() { // eslint-disable-line class-methods-use-this
return writable;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
requires: {
streamroller: {
DateRollingFileStream
}
}
});
const appender = dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout(loggingEvent) { return loggingEvent.data; } }
);
t.test("should log when writer.writable=true", assert => {
writable = true;
appender({data: "something to log"});
assert.ok(output.length, 1);
assert.match(output[output.length - 1], "something to log");
assert.end();
});
t.test("should not log when writer.writable=false", assert => {
writable = false;
appender({data: "this should not be logged"});
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], "this should not be logged");
assert.end();
});
t.end();
});
batch.test("when underlying stream errors", t => {
let consoleArgs;
let errorHandler;
const DateRollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === "error") {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
globals: {
console: {
error(...args) {
consoleArgs = args;
}
}
},
requires: {
streamroller: {
DateRollingFileStream
}
}
});
dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: "aargh" });
t.test("should log the error to console.error", assert => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
"log4js.dateFileAppender - Writing to file %s, error happened "
);
assert.equal(consoleArgs[1], "test1.log");
assert.equal(consoleArgs[2].error, "aargh");
assert.end();
});
t.end();
});
batch.end();
});
| /* eslint max-classes-per-file: ["error", 3] */
const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const EOL = require("os").EOL || "\n";
const format = require("date-format");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
function removeFile(filename) {
try {
fs.unlinkSync(path.join(__dirname, filename));
} catch (e) {
// doesn't matter
}
}
test("../../lib/appenders/dateFile", batch => {
batch.test("with default settings", t => {
const testFile = path.join(__dirname, "date-appender-default.log");
log4js.configure({
appenders: { date: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["date"], level: "DEBUG" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("This should be in the file.");
t.teardown(() => {
removeFile("date-appender-default.log");
});
setTimeout(() => {
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, "This should be in the file");
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
}, 100);
});
batch.test("configure with dateFileAppender", t => {
log4js.configure({
appenders: {
date: {
type: "dateFile",
filename: "test/tap/date-file-test.log",
pattern: "-yyyy-MM-dd",
layout: { type: "messagePassThrough" }
}
},
categories: { default: { appenders: ["date"], level: "WARN" } }
});
const logger = log4js.getLogger("tests");
logger.info("this should not be written to the file");
logger.warn("this should be written to the file");
log4js.shutdown(() => {
fs.readFile(
path.join(__dirname, "date-file-test.log"),
"utf8",
(err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(
contents.indexOf("this should not be written to the file"),
-1
);
t.end();
}
);
});
t.teardown(() => {
removeFile("date-file-test.log");
});
});
batch.test("configure with options.alwaysIncludePattern", t => {
const options = {
appenders: {
date: {
category: "tests",
type: "dateFile",
filename: "test/tap/date-file-test",
pattern: "yyyy-MM-dd.log",
alwaysIncludePattern: true,
layout: {
type: "messagePassThrough"
}
}
},
categories: { default: { appenders: ["date"], level: "debug" } }
};
const thisTime = format.asString(
options.appenders.date.pattern,
new Date()
);
const testFile = `date-file-test.${thisTime}`;
const existingFile = path.join(
__dirname,
testFile
);
fs.writeFileSync(existingFile, `this is existing data${EOL}`, "utf8");
log4js.configure(options);
const logger = log4js.getLogger("tests");
logger.warn("this should be written to the file with the appended date");
t.teardown(() => {
removeFile(testFile);
});
// wait for filesystem to catch up
log4js.shutdown(() => {
fs.readFile(existingFile, "utf8", (err, contents) => {
t.match(
contents,
"this is existing data",
"should not overwrite the file on open (issue #132)"
);
t.match(
contents,
"this should be written to the file with the appended date"
);
t.end();
});
});
});
batch.test("should flush logs on shutdown", t => {
const testFile = path.join(__dirname, "date-appender-flush.log");
log4js.configure({
appenders: { test: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("1");
logger.info("2");
logger.info("3");
t.teardown(() => {
removeFile("date-appender-flush.log");
});
log4js.shutdown(() => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test("should map maxLogSize to maxSize", t => {
const fakeStreamroller = {};
class DateRollingFileStream {
constructor(filename, pattern, options) {
fakeStreamroller.filename = filename;
fakeStreamroller.pattern = pattern;
fakeStreamroller.options = options;
}
on() { } // eslint-disable-line class-methods-use-this
}
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
const dateFileAppenderModule = sandbox.require(
"../../lib/appenders/dateFile",
{
requires: { streamroller: fakeStreamroller }
}
);
dateFileAppenderModule.configure(
{
filename: "cheese.log",
pattern: "yyyy",
maxLogSize: 100
},
{ basicLayout: () => {} }
);
t.equal(fakeStreamroller.options.maxSize, 100);
t.end();
});
batch.test("handling of writer.writable", t => {
const output = [];
let writable = true;
const DateRollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
on() { // eslint-disable-line class-methods-use-this
}
get writable() { // eslint-disable-line class-methods-use-this
return writable;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
requires: {
streamroller: {
DateRollingFileStream
}
}
});
const appender = dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout(loggingEvent) { return loggingEvent.data; } }
);
t.test("should log when writer.writable=true", assert => {
writable = true;
appender({data: "something to log"});
assert.ok(output.length, 1);
assert.match(output[output.length - 1], "something to log");
assert.end();
});
t.test("should not log when writer.writable=false", assert => {
writable = false;
appender({data: "this should not be logged"});
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], "this should not be logged");
assert.end();
});
t.end();
});
batch.test("when underlying stream errors", t => {
let consoleArgs;
let errorHandler;
const DateRollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === "error") {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
globals: {
console: {
error(...args) {
consoleArgs = args;
}
}
},
requires: {
streamroller: {
DateRollingFileStream
}
}
});
dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: "aargh" });
t.test("should log the error to console.error", assert => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
"log4js.dateFileAppender - Writing to file %s, error happened "
);
assert.equal(consoleArgs[1], "test1.log");
assert.equal(consoleArgs[2].error, "aargh");
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/log-rolling.js | const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
console: {
type: 'console'
},
file: {
type: 'file',
filename: 'tmp-test.log',
maxLogSize: 1024,
backups: 3
}
},
categories: {
default: { appenders: ['console', 'file'], level: 'info' }
}
});
const log = log4js.getLogger('test');
function doTheLogging(x) {
log.info('Logging something %d', x);
}
let i = 0;
for (; i < 5000; i += 1) {
doTheLogging(i);
}
| const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
console: {
type: 'console'
},
file: {
type: 'file',
filename: 'tmp-test.log',
maxLogSize: 1024,
backups: 3
}
},
categories: {
default: { appenders: ['console', 'file'], level: 'info' }
}
});
const log = log4js.getLogger('test');
function doTheLogging(x) {
log.info('Logging something %d', x);
}
let i = 0;
for (; i < 5000; i += 1) {
doTheLogging(i);
}
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/multiprocess-test.js | const childProcess = require("child_process");
const { test } = require("tap");
const flatted = require("flatted");
const sandbox = require("@log4js-node/sandboxed-module");
const recording = require("../../lib/appenders/recording");
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end() {
fakeNet.closeCalled = true;
}
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: "1.2.3.4",
remotePort: "1234",
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
}
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
test("Multiprocess Appender", async batch => {
batch.beforeEach(() => {
recording.erase();
});
batch.test("worker", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
worker: {
type: "multiprocess",
mode: "worker",
loggerPort: 1234,
loggerHost: "pants"
}
},
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.close();
logger.info("after error, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
logger.error(new Error("Error test"));
const net = fakeNet;
t.test("should open a socket to the loggerPort and loggerHost", assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "pants");
assert.end();
});
t.test(
"should buffer messages written before socket is connected",
assert => {
assert.match(net.data[0], "before connect");
assert.end();
}
);
t.test(
"should write log messages to socket as flatted strings with a terminator string",
assert => {
assert.match(net.data[0], "before connect");
assert.equal(net.data[1], "__LOG4JS__");
assert.match(net.data[2], "after connect");
assert.equal(net.data[3], "__LOG4JS__");
assert.equal(net.encoding, "utf8");
assert.end();
}
);
t.test("should attempt to re-open the socket on error", assert => {
assert.match(net.data[4], "after error, before connect");
assert.equal(net.data[5], "__LOG4JS__");
assert.match(net.data[6], "after error, after connect");
assert.equal(net.data[7], "__LOG4JS__");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.test("should serialize an Error correctly", assert => {
assert.ok(
flatted.parse(net.data[8]).data[0].stack,
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(net.data[8]).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.test("worker with timeout", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.timeout();
logger.info("after timeout, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after timeout, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker with error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after error, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
t.test("should open a socket to localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
batch.test("master", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet,
"./appenders/recording": recording
}
});
log4js.configure({
appenders: {
recorder: { type: "recording" },
master: {
type: "multiprocess",
mode: "master",
loggerPort: 1234,
loggerHost: "server",
appender: "recorder"
}
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
const net = fakeNet;
t.test(
"should listen for log messages on loggerPort and loggerHost",
assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "server");
assert.end();
}
);
t.test("should return the underlying appender", assert => {
log4js
.getLogger()
.info("this should be sent to the actual appender directly");
assert.equal(
recording.replay()[0].data[0],
"this should be sent to the actual appender directly"
);
assert.end();
});
t.test('should log the error on "error" event', assert => {
net.cbs.error(new Error("Expected error"));
const logEvents = recording.replay();
assert.plan(2);
assert.equal(logEvents.length, 1);
assert.equal(
"A worker log process hung up unexpectedly",
logEvents[0].data[0]
);
});
t.test("when a client connects", assert => {
const logString = `${flatted.stringify({
level: { level: 10000, levelStr: "DEBUG" },
data: ["some debug"]
})}__LOG4JS__`;
net.cbs.data(
`${flatted.stringify({
level: { level: 40000, levelStr: "ERROR" },
data: ["an error message"]
})}__LOG4JS__`
);
net.cbs.data(logString.slice(0, 10));
net.cbs.data(logString.slice(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${flatted.stringify({
level: { level: 50000, levelStr: "FATAL" },
data: ["that's all folks"]
})}__LOG4JS__`
);
net.cbs.data("bad message__LOG4JS__");
const logEvents = recording.replay();
// should parse log messages into log events and send to appender
assert.equal(logEvents[0].level.toString(), "ERROR");
assert.equal(logEvents[0].data[0], "an error message");
assert.equal(logEvents[0].remoteAddress, "1.2.3.4");
assert.equal(logEvents[0].remotePort, "1234");
// should parse log messages split into multiple chunks'
assert.equal(logEvents[1].level.toString(), "DEBUG");
assert.equal(logEvents[1].data[0], "some debug");
assert.equal(logEvents[1].remoteAddress, "1.2.3.4");
assert.equal(logEvents[1].remotePort, "1234");
// should parse multiple log messages in a single chunk'
assert.equal(logEvents[2].data[0], "some debug");
assert.equal(logEvents[3].data[0], "some debug");
assert.equal(logEvents[4].data[0], "some debug");
// should handle log messages sent as part of end event'
assert.equal(logEvents[5].data[0], "that's all folks");
// should handle unparseable log messages
assert.equal(logEvents[6].level.toString(), "ERROR");
assert.equal(logEvents[6].categoryName, "log4js");
assert.equal(logEvents[6].data[0], "Unable to parse log:");
assert.equal(logEvents[6].data[1], "bad message");
assert.end();
});
t.end();
});
batch.test("master without actual appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: { master: { type: "multiprocess", mode: "master" } },
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
});
batch.test("master with unknown appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: {
master: { type: "multiprocess", mode: "master", appender: "cheese" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
});
batch.test("master defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
stdout: { type: "stdout" },
master: { type: "multiprocess", mode: "master", appender: "stdout" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
t.test("should listen for log messages on localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
await batch.test('e2e test', async (assert) => {
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recording: { type: 'recording' },
master: { type: 'multiprocess', mode: 'master', appender: 'recording', loggerPort: 5001 },
},
categories: { default: { appenders: ['recording'], level: 'trace' } },
});
const child = childProcess.fork(
require.resolve('../multiprocess-worker.js'),
['start-multiprocess-worker', '5001'],
{ stdio: 'inherit' }
);
const actualMsg = await new Promise((res, rej) => {
child.on('message', res);
child.on('error', rej);
});
const logEvents = recording.replay();
assert.equal(actualMsg, 'worker is done');
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'Logging from worker');
assert.end();
});
batch.end();
});
| const childProcess = require("child_process");
const { test } = require("tap");
const flatted = require("flatted");
const sandbox = require("@log4js-node/sandboxed-module");
const recording = require("../../lib/appenders/recording");
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end() {
fakeNet.closeCalled = true;
}
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: "1.2.3.4",
remotePort: "1234",
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
}
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
test("Multiprocess Appender", async batch => {
batch.beforeEach(() => {
recording.erase();
});
batch.test("worker", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
worker: {
type: "multiprocess",
mode: "worker",
loggerPort: 1234,
loggerHost: "pants"
}
},
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.close();
logger.info("after error, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
logger.error(new Error("Error test"));
const net = fakeNet;
t.test("should open a socket to the loggerPort and loggerHost", assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "pants");
assert.end();
});
t.test(
"should buffer messages written before socket is connected",
assert => {
assert.match(net.data[0], "before connect");
assert.end();
}
);
t.test(
"should write log messages to socket as flatted strings with a terminator string",
assert => {
assert.match(net.data[0], "before connect");
assert.equal(net.data[1], "__LOG4JS__");
assert.match(net.data[2], "after connect");
assert.equal(net.data[3], "__LOG4JS__");
assert.equal(net.encoding, "utf8");
assert.end();
}
);
t.test("should attempt to re-open the socket on error", assert => {
assert.match(net.data[4], "after error, before connect");
assert.equal(net.data[5], "__LOG4JS__");
assert.match(net.data[6], "after error, after connect");
assert.equal(net.data[7], "__LOG4JS__");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.test("should serialize an Error correctly", assert => {
assert.ok(
flatted.parse(net.data[8]).data[0].stack,
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(net.data[8]).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.test("worker with timeout", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.timeout();
logger.info("after timeout, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after timeout, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker with error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after error, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
t.test("should open a socket to localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
batch.test("master", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet,
"./appenders/recording": recording
}
});
log4js.configure({
appenders: {
recorder: { type: "recording" },
master: {
type: "multiprocess",
mode: "master",
loggerPort: 1234,
loggerHost: "server",
appender: "recorder"
}
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
const net = fakeNet;
t.test(
"should listen for log messages on loggerPort and loggerHost",
assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "server");
assert.end();
}
);
t.test("should return the underlying appender", assert => {
log4js
.getLogger()
.info("this should be sent to the actual appender directly");
assert.equal(
recording.replay()[0].data[0],
"this should be sent to the actual appender directly"
);
assert.end();
});
t.test('should log the error on "error" event', assert => {
net.cbs.error(new Error("Expected error"));
const logEvents = recording.replay();
assert.plan(2);
assert.equal(logEvents.length, 1);
assert.equal(
"A worker log process hung up unexpectedly",
logEvents[0].data[0]
);
});
t.test("when a client connects", assert => {
const logString = `${flatted.stringify({
level: { level: 10000, levelStr: "DEBUG" },
data: ["some debug"]
})}__LOG4JS__`;
net.cbs.data(
`${flatted.stringify({
level: { level: 40000, levelStr: "ERROR" },
data: ["an error message"]
})}__LOG4JS__`
);
net.cbs.data(logString.slice(0, 10));
net.cbs.data(logString.slice(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${flatted.stringify({
level: { level: 50000, levelStr: "FATAL" },
data: ["that's all folks"]
})}__LOG4JS__`
);
net.cbs.data("bad message__LOG4JS__");
const logEvents = recording.replay();
// should parse log messages into log events and send to appender
assert.equal(logEvents[0].level.toString(), "ERROR");
assert.equal(logEvents[0].data[0], "an error message");
assert.equal(logEvents[0].remoteAddress, "1.2.3.4");
assert.equal(logEvents[0].remotePort, "1234");
// should parse log messages split into multiple chunks'
assert.equal(logEvents[1].level.toString(), "DEBUG");
assert.equal(logEvents[1].data[0], "some debug");
assert.equal(logEvents[1].remoteAddress, "1.2.3.4");
assert.equal(logEvents[1].remotePort, "1234");
// should parse multiple log messages in a single chunk'
assert.equal(logEvents[2].data[0], "some debug");
assert.equal(logEvents[3].data[0], "some debug");
assert.equal(logEvents[4].data[0], "some debug");
// should handle log messages sent as part of end event'
assert.equal(logEvents[5].data[0], "that's all folks");
// should handle unparseable log messages
assert.equal(logEvents[6].level.toString(), "ERROR");
assert.equal(logEvents[6].categoryName, "log4js");
assert.equal(logEvents[6].data[0], "Unable to parse log:");
assert.equal(logEvents[6].data[1], "bad message");
assert.end();
});
t.end();
});
batch.test("master without actual appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: { master: { type: "multiprocess", mode: "master" } },
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
});
batch.test("master with unknown appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: {
master: { type: "multiprocess", mode: "master", appender: "cheese" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
});
batch.test("master defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
stdout: { type: "stdout" },
master: { type: "multiprocess", mode: "master", appender: "stdout" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
t.test("should listen for log messages on localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
await batch.test('e2e test', async (assert) => {
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recording: { type: 'recording' },
master: { type: 'multiprocess', mode: 'master', appender: 'recording', loggerPort: 5001 },
},
categories: { default: { appenders: ['recording'], level: 'trace' } },
});
const child = childProcess.fork(
require.resolve('../multiprocess-worker.js'),
['start-multiprocess-worker', '5001'],
{ stdio: 'inherit' }
);
const actualMsg = await new Promise((res, rej) => {
child.on('message', res);
child.on('error', rej);
});
const logEvents = recording.replay();
assert.equal(actualMsg, 'worker is done');
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'Logging from worker');
assert.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./lib/appenders/tcp-server.js | const debug = require('debug')('log4js:tcp-server');
const net = require('net');
const clustering = require('../clustering');
const LoggingEvent = require('../LoggingEvent');
const DELIMITER = '__LOG4JS__';
exports.configure = (config) => {
debug('configure called with ', config);
const server = net.createServer((socket) => {
let dataSoFar = '';
const send = (data) => {
if (data) {
dataSoFar += data;
if (dataSoFar.indexOf(DELIMITER)) {
const events = dataSoFar.split(DELIMITER);
if (!dataSoFar.endsWith(DELIMITER)) {
dataSoFar = events.pop();
} else {
dataSoFar = '';
}
events.filter(e => e.length).forEach((e) => {
clustering.send(LoggingEvent.deserialise(e));
});
} else {
dataSoFar = '';
}
}
};
socket.setEncoding('utf8');
socket.on('data', send);
socket.on('end', send);
});
server.listen(config.port || 5000, config.host || 'localhost', () => {
debug(`listening on ${config.host || 'localhost'}:${config.port || 5000}`);
server.unref();
});
return {
shutdown: (cb) => {
debug('shutdown called.');
server.close(cb);
}
};
};
| const debug = require('debug')('log4js:tcp-server');
const net = require('net');
const clustering = require('../clustering');
const LoggingEvent = require('../LoggingEvent');
const DELIMITER = '__LOG4JS__';
exports.configure = (config) => {
debug('configure called with ', config);
const server = net.createServer((socket) => {
let dataSoFar = '';
const send = (data) => {
if (data) {
dataSoFar += data;
if (dataSoFar.indexOf(DELIMITER)) {
const events = dataSoFar.split(DELIMITER);
if (!dataSoFar.endsWith(DELIMITER)) {
dataSoFar = events.pop();
} else {
dataSoFar = '';
}
events.filter(e => e.length).forEach((e) => {
clustering.send(LoggingEvent.deserialise(e));
});
} else {
dataSoFar = '';
}
}
};
socket.setEncoding('utf8');
socket.on('data', send);
socket.on('end', send);
});
server.listen(config.port || 5000, config.host || 'localhost', () => {
debug(`listening on ${config.host || 'localhost'}:${config.port || 5000}`);
server.unref();
});
return {
shutdown: (cb) => {
debug('shutdown called.');
server.close(cb);
}
};
};
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/stacktraces-test.js | const { test } = require("tap");
test("Stacktraces from errors in different VM context", t => {
const log4js = require("../../lib/log4js");
const recorder = require("../../lib/appenders/recording");
const layout = require("../../lib/layouts").basicLayout;
const vm = require("vm");
log4js.configure({
appenders: { vcr: { type: "recording" } },
categories: { default: { appenders: ["vcr"], level: "debug" } }
});
const logger = log4js.getLogger();
try {
// Access not defined variable.
vm.runInNewContext("myVar();", {}, "myfile.js");
} catch (e) {
// Expect to have a stack trace printed.
logger.error(e);
}
const events = recorder.replay();
// recording appender events do not go through layouts, so let's do it
const output = layout(events[0]);
t.match(output, "stacktraces-test.js");
t.end();
});
| const { test } = require("tap");
test("Stacktraces from errors in different VM context", t => {
const log4js = require("../../lib/log4js");
const recorder = require("../../lib/appenders/recording");
const layout = require("../../lib/layouts").basicLayout;
const vm = require("vm");
log4js.configure({
appenders: { vcr: { type: "recording" } },
categories: { default: { appenders: ["vcr"], level: "debug" } }
});
const logger = log4js.getLogger();
try {
// Access not defined variable.
vm.runInNewContext("myVar();", {}, "myfile.js");
} catch (e) {
// Expect to have a stack trace printed.
logger.error(e);
}
const events = recorder.replay();
// recording appender events do not go through layouts, so let's do it
const output = layout(events[0]);
t.match(output, "stacktraces-test.js");
t.end();
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/multiprocess-shutdown-test.js | const { test } = require("tap");
const net = require("net");
const childProcess = require("child_process");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
test("multiprocess appender shutdown (master)", { timeout: 5000 }, t => {
log4js.configure({
appenders: {
stdout: { type: "stdout" },
multi: {
type: "multiprocess",
mode: "master",
loggerPort: 12345,
appender: "stdout"
}
},
categories: { default: { appenders: ["multi"], level: "debug" } }
});
setTimeout(() => {
log4js.shutdown(() => {
setTimeout(() => {
net
.connect({ port: 12345 }, () => {
t.fail("connection should not still work");
t.end();
})
.on("error", err => {
t.ok(err, "we got a connection error");
t.end();
});
}, 1000);
});
}, 1000);
});
test("multiprocess appender shutdown (worker)", t => {
const fakeConnection = {
evts: {},
msgs: [],
on(evt, cb) {
this.evts[evt] = cb;
},
write(data) {
this.msgs.push(data);
},
removeAllListeners() {
this.removeAllListenersCalled = true;
},
end(cb) {
this.endCb = cb;
}
};
const logLib = sandbox.require("../../lib/log4js", {
requires: {
net: {
createConnection() {
return fakeConnection;
}
}
}
});
logLib.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "debug" } }
});
logLib
.getLogger()
.info(
"Putting something in the buffer before the connection is established"
);
// nothing been written yet.
t.equal(fakeConnection.msgs.length, 0);
let shutdownFinished = false;
logLib.shutdown(() => {
shutdownFinished = true;
});
// still nothing been written yet.
t.equal(fakeConnection.msgs.length, 0);
fakeConnection.evts.connect();
setTimeout(() => {
t.equal(fakeConnection.msgs.length, 2);
t.ok(fakeConnection.removeAllListenersCalled);
fakeConnection.endCb();
t.ok(shutdownFinished);
t.end();
}, 500);
});
test("multiprocess appender crash (worker)", t => {
const loggerPort = 12346;
const vcr = require("../../lib/appenders/recording");
log4js.configure({
appenders: {
console: { type: "recording" },
multi: {
type: "multiprocess",
mode: "master",
loggerPort,
appender: "console"
}
},
categories: { default: { appenders: ["multi"], level: "debug" } }
});
const worker = childProcess.fork(require.resolve("../multiprocess-worker"), [
"start-multiprocess-worker",
loggerPort
]);
worker.on("message", m => {
if (m === "worker is done") {
setTimeout(() => {
worker.kill();
t.equal(vcr.replay()[0].data[0], "Logging from worker");
log4js.shutdown(() => t.end());
}, 100);
}
});
});
| const { test } = require("tap");
const net = require("net");
const childProcess = require("child_process");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
test("multiprocess appender shutdown (master)", { timeout: 5000 }, t => {
log4js.configure({
appenders: {
stdout: { type: "stdout" },
multi: {
type: "multiprocess",
mode: "master",
loggerPort: 12345,
appender: "stdout"
}
},
categories: { default: { appenders: ["multi"], level: "debug" } }
});
setTimeout(() => {
log4js.shutdown(() => {
setTimeout(() => {
net
.connect({ port: 12345 }, () => {
t.fail("connection should not still work");
t.end();
})
.on("error", err => {
t.ok(err, "we got a connection error");
t.end();
});
}, 1000);
});
}, 1000);
});
test("multiprocess appender shutdown (worker)", t => {
const fakeConnection = {
evts: {},
msgs: [],
on(evt, cb) {
this.evts[evt] = cb;
},
write(data) {
this.msgs.push(data);
},
removeAllListeners() {
this.removeAllListenersCalled = true;
},
end(cb) {
this.endCb = cb;
}
};
const logLib = sandbox.require("../../lib/log4js", {
requires: {
net: {
createConnection() {
return fakeConnection;
}
}
}
});
logLib.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "debug" } }
});
logLib
.getLogger()
.info(
"Putting something in the buffer before the connection is established"
);
// nothing been written yet.
t.equal(fakeConnection.msgs.length, 0);
let shutdownFinished = false;
logLib.shutdown(() => {
shutdownFinished = true;
});
// still nothing been written yet.
t.equal(fakeConnection.msgs.length, 0);
fakeConnection.evts.connect();
setTimeout(() => {
t.equal(fakeConnection.msgs.length, 2);
t.ok(fakeConnection.removeAllListenersCalled);
fakeConnection.endCb();
t.ok(shutdownFinished);
t.end();
}, 500);
});
test("multiprocess appender crash (worker)", t => {
const loggerPort = 12346;
const vcr = require("../../lib/appenders/recording");
log4js.configure({
appenders: {
console: { type: "recording" },
multi: {
type: "multiprocess",
mode: "master",
loggerPort,
appender: "console"
}
},
categories: { default: { appenders: ["multi"], level: "debug" } }
});
const worker = childProcess.fork(require.resolve("../multiprocess-worker"), [
"start-multiprocess-worker",
loggerPort
]);
worker.on("message", m => {
if (m === "worker is done") {
setTimeout(() => {
worker.kill();
t.equal(vcr.replay()[0].data[0], "Logging from worker");
log4js.shutdown(() => t.end());
}, 100);
}
});
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/custom-layout.js | const log4js = require('../lib/log4js');
log4js.addLayout('json', config => function (logEvent) {
return JSON.stringify(logEvent) + config.separator;
});
log4js.configure({
appenders: {
out: { type: 'stdout', layout: { type: 'json', separator: ',' } }
},
categories: {
default: { appenders: ['out'], level: 'info' }
}
});
const logger = log4js.getLogger('json-test');
logger.info('this is just a test');
logger.error('of a custom appender');
logger.warn('that outputs json');
log4js.shutdown(() => {});
| const log4js = require('../lib/log4js');
log4js.addLayout('json', config => function (logEvent) {
return JSON.stringify(logEvent) + config.separator;
});
log4js.configure({
appenders: {
out: { type: 'stdout', layout: { type: 'json', separator: ',' } }
},
categories: {
default: { appenders: ['out'], level: 'info' }
}
});
const logger = log4js.getLogger('json-test');
logger.info('this is just a test');
logger.error('of a custom appender');
logger.warn('that outputs json');
log4js.shutdown(() => {});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/file-sighup-test.js | const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const sandbox = require("@log4js-node/sandboxed-module");
const removeFiles = async filenames => {
if (!Array.isArray(filenames))
filenames = [filenames];
const promises = filenames.map(filename => fs.promises.unlink(filename));
await Promise.allSettled(promises);
};
test("file appender single SIGHUP handler", t => {
const initialListeners = process.listenerCount("SIGHUP");
let warning;
const originalListener = process.listeners("warning")[process.listeners("warning").length - 1];
const warningListener = error => {
if (error.type === "SIGHUP" && error.name === "MaxListenersExceededWarning") {
warning = error;
return;
}
originalListener(error);
};
process.off("warning", originalListener);
process.on("warning", warningListener);
const config = {
appenders: {},
categories: {
default: { appenders: [], level: 'debug' }
}
};
// create 11 appenders to make nodejs warn for >10 max listeners
const numOfAppenders = 11;
for (let i = 1; i <= numOfAppenders; i++) {
config.appenders[`app${i}`] = { type: 'file', filename: path.join(__dirname, `file${i}.log`) };
config.categories.default.appenders.push(`app${i}`);
}
const log4js = require("../../lib/log4js");
log4js.configure(config);
t.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off("warning", warningListener);
process.on("warning", originalListener);
});
await new Promise(resolve => { log4js.shutdown(resolve); });
const filenames = Object.values(config.appenders).map(appender => appender.filename);
await removeFiles(filenames);
});
t.plan(2);
// next event loop to allow event emitter/listener to happen
setImmediate(() => {
t.notOk(warning, "should not have MaxListenersExceededWarning for SIGHUP");
t.equal(process.listenerCount("SIGHUP") - initialListeners, 1, "should be 1 SIGHUP listener");
t.end();
});
});
test("file appender SIGHUP", t => {
let closeCalled = 0;
let openCalled = 0;
sandbox
.require("../../lib/appenders/file", {
requires: {
streamroller: {
RollingFileStream: class RollingFileStream {
constructor() {
openCalled++;
this.ended = false;
}
on() {
this.dummy = "easier than turning off lint rule";
}
end(cb) {
this.ended = true;
closeCalled++;
cb();
}
write() {
if (this.ended) {
throw new Error("write after end");
}
return true;
}
}
}
}
})
.configure(
{ type: "file", filename: "sighup-test-file" },
{
basicLayout() {
return "whatever";
}
}
);
process.emit("SIGHUP", "SIGHUP", 1);
t.plan(2);
setTimeout(() => {
t.equal(openCalled, 2, "open should be called twice");
t.equal(closeCalled, 1, "close should be called once");
t.end();
}, 100);
});
test("file appender SIGHUP handler leak", t => {
const log4js = require("../../lib/log4js");
const initialListeners = process.listenerCount("SIGHUP");
log4js.configure({
appenders: {
file: { type: "file", filename: "test.log" }
},
categories: { default: { appenders: ["file"], level: "info" } }
});
t.teardown(async () => {
await removeFiles("test.log");
});
t.plan(2);
t.equal(process.listenerCount("SIGHUP"), initialListeners + 1);
log4js.shutdown(() => {
t.equal(process.listenerCount("SIGHUP"), initialListeners);
t.end();
});
}); | const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const sandbox = require("@log4js-node/sandboxed-module");
const removeFiles = async filenames => {
if (!Array.isArray(filenames))
filenames = [filenames];
const promises = filenames.map(filename => fs.promises.unlink(filename));
await Promise.allSettled(promises);
};
test("file appender single SIGHUP handler", t => {
const initialListeners = process.listenerCount("SIGHUP");
let warning;
const originalListener = process.listeners("warning")[process.listeners("warning").length - 1];
const warningListener = error => {
if (error.type === "SIGHUP" && error.name === "MaxListenersExceededWarning") {
warning = error;
return;
}
originalListener(error);
};
process.off("warning", originalListener);
process.on("warning", warningListener);
const config = {
appenders: {},
categories: {
default: { appenders: [], level: 'debug' }
}
};
// create 11 appenders to make nodejs warn for >10 max listeners
const numOfAppenders = 11;
for (let i = 1; i <= numOfAppenders; i++) {
config.appenders[`app${i}`] = { type: 'file', filename: path.join(__dirname, `file${i}.log`) };
config.categories.default.appenders.push(`app${i}`);
}
const log4js = require("../../lib/log4js");
log4js.configure(config);
t.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off("warning", warningListener);
process.on("warning", originalListener);
});
await new Promise(resolve => { log4js.shutdown(resolve); });
const filenames = Object.values(config.appenders).map(appender => appender.filename);
await removeFiles(filenames);
});
t.plan(2);
// next event loop to allow event emitter/listener to happen
setImmediate(() => {
t.notOk(warning, "should not have MaxListenersExceededWarning for SIGHUP");
t.equal(process.listenerCount("SIGHUP") - initialListeners, 1, "should be 1 SIGHUP listener");
t.end();
});
});
test("file appender SIGHUP", t => {
let closeCalled = 0;
let openCalled = 0;
sandbox
.require("../../lib/appenders/file", {
requires: {
streamroller: {
RollingFileStream: class RollingFileStream {
constructor() {
openCalled++;
this.ended = false;
}
on() {
this.dummy = "easier than turning off lint rule";
}
end(cb) {
this.ended = true;
closeCalled++;
cb();
}
write() {
if (this.ended) {
throw new Error("write after end");
}
return true;
}
}
}
}
})
.configure(
{ type: "file", filename: "sighup-test-file" },
{
basicLayout() {
return "whatever";
}
}
);
process.emit("SIGHUP", "SIGHUP", 1);
t.plan(2);
setTimeout(() => {
t.equal(openCalled, 2, "open should be called twice");
t.equal(closeCalled, 1, "close should be called once");
t.end();
}, 100);
});
test("file appender SIGHUP handler leak", t => {
const log4js = require("../../lib/log4js");
const initialListeners = process.listenerCount("SIGHUP");
log4js.configure({
appenders: {
file: { type: "file", filename: "test.log" }
},
categories: { default: { appenders: ["file"], level: "info" } }
});
t.teardown(async () => {
await removeFiles("test.log");
});
t.plan(2);
t.equal(process.listenerCount("SIGHUP"), initialListeners + 1);
log4js.shutdown(() => {
t.equal(process.listenerCount("SIGHUP"), initialListeners);
t.end();
});
}); | -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/fileAppender-test.js | /* eslint max-classes-per-file: ["error", 2] */
const { test } = require("tap");
const fs = require("fs-extra");
const path = require("path");
const sandbox = require("@log4js-node/sandboxed-module");
const zlib = require("zlib");
const util = require("util");
const sleep = util.promisify(setTimeout);
const gunzip = util.promisify(zlib.gunzip);
const EOL = require("os").EOL || "\n";
const log4js = require("../../lib/log4js");
const removeFile = async filename => {
try {
await fs.unlink(filename);
} catch (e) {
// let's pretend this never happened
}
};
test("log4js fileAppender", batch => {
batch.test("with default fileAppender settings", async t => {
const testFile = path.join(__dirname, "fa-default-test.log");
const logger = log4js.getLogger("default-settings");
await removeFile(testFile);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await removeFile(testFile);
});
log4js.configure({
appenders: { file: { type: "file", filename: testFile } },
categories: { default: { appenders: ["file"], level: "debug" } }
});
logger.info("This should be in the file.");
await sleep(100);
const fileContents = await fs.readFile(testFile, "utf8");
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.test("should give error if invalid filename", async t => {
const file = "";
const expectedError = new Error(`Invalid filename: ${file}`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "file",
filename: file
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("should flush logs on shutdown", async t => {
const testFile = path.join(__dirname, "fa-default-test.log");
const logger = log4js.getLogger("default-settings");
await removeFile(testFile);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await removeFile(testFile);
});
log4js.configure({
appenders: { test: { type: "file", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
logger.info("1");
logger.info("2");
logger.info("3");
await new Promise(resolve => { log4js.shutdown(resolve); });
const fileContents = await fs.readFile(testFile, "utf8");
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.test("with a max file size and no backups", async t => {
const testFile = path.join(__dirname, "fa-maxFileSize-test.log");
const logger = log4js.getLogger("max-file-size");
await removeFile(testFile);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await removeFile(testFile);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: "file",
filename: testFile,
maxLogSize: 100,
backups: 0
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
});
logger.info("This is the first log message.");
logger.info("This is an intermediate log message.");
logger.info("This is the second log message.");
// wait for the file system to catch up
await sleep(100);
const fileContents = await fs.readFile(testFile, "utf8");
t.match(fileContents, "This is the second log message.");
t.equal(fileContents.indexOf("This is the first log message."), -1);
const files = await fs.readdir(__dirname);
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-test.log")
);
t.equal(logFiles.length, 1, "should be 1 file");
t.end();
});
batch.test("with a max file size in wrong unit mode", async t => {
const invalidUnit = "1Z";
const expectedError = new Error(`maxLogSize: "${invalidUnit}" is invalid`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "file",
maxLogSize: invalidUnit
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("with a max file size in unit mode and no backups", async t => {
const testFile = path.join(__dirname, "fa-maxFileSize-unit-test.log");
const logger = log4js.getLogger("max-file-size-unit");
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
});
// log file of 1K = 1024 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: "file",
filename: testFile,
maxLogSize: "1K",
backups: 0,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
});
const maxLine = 22; // 1024 max file size / 47 bytes per line
for (let i = 0; i < maxLine; i++) {
logger.info("These are the log messages for the first file."); // 46 bytes per line + '\n'
}
logger.info("This is the second log message.");
// wait for the file system to catch up
await sleep(100);
const fileContents = await fs.readFile(testFile, "utf8");
t.match(fileContents, "This is the second log message.");
t.notMatch(fileContents, "These are the log messages for the first file.");
const files = await fs.readdir(__dirname);
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-unit-test.log")
);
t.equal(logFiles.length, 1, "should be 1 file");
t.end();
});
batch.test("with a max file size and 2 backups", async t => {
const testFile = path.join(
__dirname,
"fa-maxFileSize-with-backups-test.log"
);
const logger = log4js.getLogger("max-file-size-backups");
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1`),
removeFile(`${testFile}.2`)
]);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1`),
removeFile(`${testFile}.2`)
]);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: {
type: "file",
filename: testFile,
maxLogSize: 50,
backups: 2
}
},
categories: { default: { appenders: ["file"], level: "debug" } }
});
logger.info("This is the first log message.");
logger.info("This is the second log message.");
logger.info("This is the third log message.");
logger.info("This is the fourth log message.");
// give the system a chance to open the stream
await sleep(200);
const files = await fs.readdir(__dirname);
const logFiles = files
.sort()
.filter(file => file.includes("fa-maxFileSize-with-backups-test.log"));
t.equal(logFiles.length, 3);
t.same(logFiles, [
"fa-maxFileSize-with-backups-test.log",
"fa-maxFileSize-with-backups-test.log.1",
"fa-maxFileSize-with-backups-test.log.2"
]);
let contents = await fs.readFile(path.join(__dirname, logFiles[0]), "utf8");
t.match(contents, "This is the fourth log message.");
contents = await fs.readFile(path.join(__dirname, logFiles[1]), "utf8");
t.match(contents, "This is the third log message.");
contents = await fs.readFile(path.join(__dirname, logFiles[2]), "utf8");
t.match(contents, "This is the second log message.");
t.end();
});
batch.test("with a max file size and 2 compressed backups", async t => {
const testFile = path.join(
__dirname,
"fa-maxFileSize-with-backups-compressed-test.log"
);
const logger = log4js.getLogger("max-file-size-backups");
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1.gz`),
removeFile(`${testFile}.2.gz`)
]);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1.gz`),
removeFile(`${testFile}.2.gz`)
]);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: {
type: "file",
filename: testFile,
maxLogSize: 50,
backups: 2,
compress: true
}
},
categories: { default: { appenders: ["file"], level: "debug" } }
});
logger.info("This is the first log message.");
logger.info("This is the second log message.");
logger.info("This is the third log message.");
logger.info("This is the fourth log message.");
// give the system a chance to open the stream
await sleep(1000);
const files = await fs.readdir(__dirname);
const logFiles = files
.sort()
.filter(file =>
file.includes("fa-maxFileSize-with-backups-compressed-test.log")
);
t.equal(logFiles.length, 3, "should be 3 files");
t.same(logFiles, [
"fa-maxFileSize-with-backups-compressed-test.log",
"fa-maxFileSize-with-backups-compressed-test.log.1.gz",
"fa-maxFileSize-with-backups-compressed-test.log.2.gz"
]);
let contents = await fs.readFile(path.join(__dirname, logFiles[0]), "utf8");
t.match(contents, "This is the fourth log message.");
contents = await gunzip(
await fs.readFile(path.join(__dirname, logFiles[1]))
);
t.match(contents.toString("utf8"), "This is the third log message.");
contents = await gunzip(
await fs.readFile(path.join(__dirname, logFiles[2]))
);
t.match(contents.toString("utf8"), "This is the second log message.");
t.end();
});
batch.test("handling of writer.writable", t => {
const output = [];
let writable = true;
const RollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
on() { // eslint-disable-line class-methods-use-this
}
get writable() { // eslint-disable-line class-methods-use-this
return writable;
}
};
const fileAppender = sandbox.require("../../lib/appenders/file", {
requires: {
streamroller: {
RollingFileStream
}
}
});
const appender = fileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout(loggingEvent) { return loggingEvent.data; } }
);
t.test("should log when writer.writable=true", assert => {
writable = true;
appender({data: "something to log"});
assert.ok(output.length, 1);
assert.match(output[output.length - 1], "something to log");
assert.end();
});
t.test("should not log when writer.writable=false", assert => {
writable = false;
appender({data: "this should not be logged"});
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], "this should not be logged");
assert.end();
});
t.end();
});
batch.test("when underlying stream errors", t => {
let consoleArgs;
let errorHandler;
const RollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === "error") {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const fileAppender = sandbox.require("../../lib/appenders/file", {
globals: {
console: {
error(...args) {
consoleArgs = args;
}
}
},
requires: {
streamroller: {
RollingFileStream
}
}
});
fileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: "aargh" });
t.test("should log the error to console.error", assert => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
"log4js.fileAppender - Writing to file %s, error happened "
);
assert.equal(consoleArgs[1], "test1.log");
assert.equal(consoleArgs[2].error, "aargh");
assert.end();
});
t.end();
});
batch.test("with removeColor fileAppender settings", async t => {
const testFilePlain = path.join(__dirname, "fa-removeColor-test.log");
const testFileAsIs = path.join(__dirname, "fa-asIs-test.log");
const logger = log4js.getLogger("default-settings");
await removeFile(testFilePlain);
await removeFile(testFileAsIs);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await removeFile(testFilePlain);
await removeFile(testFileAsIs);
});
log4js.configure({
appenders: {
plainFile: { type: "file", filename: testFilePlain, removeColor: true },
asIsFile: { type: "file", filename: testFileAsIs, removeColor: false }
},
categories: { default: { appenders: ["plainFile", "asIsFile"], level: "debug" } }
});
logger.info("This should be in the file.",
"\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m.", {}, []);
await sleep(100);
let fileContents = await fs.readFile(testFilePlain, "utf8");
t.match(fileContents, `This should be in the file. Color should be plain. {} []${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
fileContents = await fs.readFile(testFileAsIs, "utf8");
t.match(fileContents, "This should be in the file.",
`\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m. {} []${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.end();
});
| /* eslint max-classes-per-file: ["error", 2] */
const { test } = require("tap");
const fs = require("fs-extra");
const path = require("path");
const sandbox = require("@log4js-node/sandboxed-module");
const zlib = require("zlib");
const util = require("util");
const sleep = util.promisify(setTimeout);
const gunzip = util.promisify(zlib.gunzip);
const EOL = require("os").EOL || "\n";
const log4js = require("../../lib/log4js");
const removeFile = async filename => {
try {
await fs.unlink(filename);
} catch (e) {
// let's pretend this never happened
}
};
test("log4js fileAppender", batch => {
batch.test("with default fileAppender settings", async t => {
const testFile = path.join(__dirname, "fa-default-test.log");
const logger = log4js.getLogger("default-settings");
await removeFile(testFile);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await removeFile(testFile);
});
log4js.configure({
appenders: { file: { type: "file", filename: testFile } },
categories: { default: { appenders: ["file"], level: "debug" } }
});
logger.info("This should be in the file.");
await sleep(100);
const fileContents = await fs.readFile(testFile, "utf8");
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.test("should give error if invalid filename", async t => {
const file = "";
const expectedError = new Error(`Invalid filename: ${file}`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "file",
filename: file
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("should flush logs on shutdown", async t => {
const testFile = path.join(__dirname, "fa-default-test.log");
const logger = log4js.getLogger("default-settings");
await removeFile(testFile);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await removeFile(testFile);
});
log4js.configure({
appenders: { test: { type: "file", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
logger.info("1");
logger.info("2");
logger.info("3");
await new Promise(resolve => { log4js.shutdown(resolve); });
const fileContents = await fs.readFile(testFile, "utf8");
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.test("with a max file size and no backups", async t => {
const testFile = path.join(__dirname, "fa-maxFileSize-test.log");
const logger = log4js.getLogger("max-file-size");
await removeFile(testFile);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await removeFile(testFile);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: "file",
filename: testFile,
maxLogSize: 100,
backups: 0
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
});
logger.info("This is the first log message.");
logger.info("This is an intermediate log message.");
logger.info("This is the second log message.");
// wait for the file system to catch up
await sleep(100);
const fileContents = await fs.readFile(testFile, "utf8");
t.match(fileContents, "This is the second log message.");
t.equal(fileContents.indexOf("This is the first log message."), -1);
const files = await fs.readdir(__dirname);
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-test.log")
);
t.equal(logFiles.length, 1, "should be 1 file");
t.end();
});
batch.test("with a max file size in wrong unit mode", async t => {
const invalidUnit = "1Z";
const expectedError = new Error(`maxLogSize: "${invalidUnit}" is invalid`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "file",
maxLogSize: invalidUnit
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("with a max file size in unit mode and no backups", async t => {
const testFile = path.join(__dirname, "fa-maxFileSize-unit-test.log");
const logger = log4js.getLogger("max-file-size-unit");
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
});
// log file of 1K = 1024 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: "file",
filename: testFile,
maxLogSize: "1K",
backups: 0,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
});
const maxLine = 22; // 1024 max file size / 47 bytes per line
for (let i = 0; i < maxLine; i++) {
logger.info("These are the log messages for the first file."); // 46 bytes per line + '\n'
}
logger.info("This is the second log message.");
// wait for the file system to catch up
await sleep(100);
const fileContents = await fs.readFile(testFile, "utf8");
t.match(fileContents, "This is the second log message.");
t.notMatch(fileContents, "These are the log messages for the first file.");
const files = await fs.readdir(__dirname);
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-unit-test.log")
);
t.equal(logFiles.length, 1, "should be 1 file");
t.end();
});
batch.test("with a max file size and 2 backups", async t => {
const testFile = path.join(
__dirname,
"fa-maxFileSize-with-backups-test.log"
);
const logger = log4js.getLogger("max-file-size-backups");
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1`),
removeFile(`${testFile}.2`)
]);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1`),
removeFile(`${testFile}.2`)
]);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: {
type: "file",
filename: testFile,
maxLogSize: 50,
backups: 2
}
},
categories: { default: { appenders: ["file"], level: "debug" } }
});
logger.info("This is the first log message.");
logger.info("This is the second log message.");
logger.info("This is the third log message.");
logger.info("This is the fourth log message.");
// give the system a chance to open the stream
await sleep(200);
const files = await fs.readdir(__dirname);
const logFiles = files
.sort()
.filter(file => file.includes("fa-maxFileSize-with-backups-test.log"));
t.equal(logFiles.length, 3);
t.same(logFiles, [
"fa-maxFileSize-with-backups-test.log",
"fa-maxFileSize-with-backups-test.log.1",
"fa-maxFileSize-with-backups-test.log.2"
]);
let contents = await fs.readFile(path.join(__dirname, logFiles[0]), "utf8");
t.match(contents, "This is the fourth log message.");
contents = await fs.readFile(path.join(__dirname, logFiles[1]), "utf8");
t.match(contents, "This is the third log message.");
contents = await fs.readFile(path.join(__dirname, logFiles[2]), "utf8");
t.match(contents, "This is the second log message.");
t.end();
});
batch.test("with a max file size and 2 compressed backups", async t => {
const testFile = path.join(
__dirname,
"fa-maxFileSize-with-backups-compressed-test.log"
);
const logger = log4js.getLogger("max-file-size-backups");
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1.gz`),
removeFile(`${testFile}.2.gz`)
]);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1.gz`),
removeFile(`${testFile}.2.gz`)
]);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: {
type: "file",
filename: testFile,
maxLogSize: 50,
backups: 2,
compress: true
}
},
categories: { default: { appenders: ["file"], level: "debug" } }
});
logger.info("This is the first log message.");
logger.info("This is the second log message.");
logger.info("This is the third log message.");
logger.info("This is the fourth log message.");
// give the system a chance to open the stream
await sleep(1000);
const files = await fs.readdir(__dirname);
const logFiles = files
.sort()
.filter(file =>
file.includes("fa-maxFileSize-with-backups-compressed-test.log")
);
t.equal(logFiles.length, 3, "should be 3 files");
t.same(logFiles, [
"fa-maxFileSize-with-backups-compressed-test.log",
"fa-maxFileSize-with-backups-compressed-test.log.1.gz",
"fa-maxFileSize-with-backups-compressed-test.log.2.gz"
]);
let contents = await fs.readFile(path.join(__dirname, logFiles[0]), "utf8");
t.match(contents, "This is the fourth log message.");
contents = await gunzip(
await fs.readFile(path.join(__dirname, logFiles[1]))
);
t.match(contents.toString("utf8"), "This is the third log message.");
contents = await gunzip(
await fs.readFile(path.join(__dirname, logFiles[2]))
);
t.match(contents.toString("utf8"), "This is the second log message.");
t.end();
});
batch.test("handling of writer.writable", t => {
const output = [];
let writable = true;
const RollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
on() { // eslint-disable-line class-methods-use-this
}
get writable() { // eslint-disable-line class-methods-use-this
return writable;
}
};
const fileAppender = sandbox.require("../../lib/appenders/file", {
requires: {
streamroller: {
RollingFileStream
}
}
});
const appender = fileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout(loggingEvent) { return loggingEvent.data; } }
);
t.test("should log when writer.writable=true", assert => {
writable = true;
appender({data: "something to log"});
assert.ok(output.length, 1);
assert.match(output[output.length - 1], "something to log");
assert.end();
});
t.test("should not log when writer.writable=false", assert => {
writable = false;
appender({data: "this should not be logged"});
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], "this should not be logged");
assert.end();
});
t.end();
});
batch.test("when underlying stream errors", t => {
let consoleArgs;
let errorHandler;
const RollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === "error") {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const fileAppender = sandbox.require("../../lib/appenders/file", {
globals: {
console: {
error(...args) {
consoleArgs = args;
}
}
},
requires: {
streamroller: {
RollingFileStream
}
}
});
fileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: "aargh" });
t.test("should log the error to console.error", assert => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
"log4js.fileAppender - Writing to file %s, error happened "
);
assert.equal(consoleArgs[1], "test1.log");
assert.equal(consoleArgs[2].error, "aargh");
assert.end();
});
t.end();
});
batch.test("with removeColor fileAppender settings", async t => {
const testFilePlain = path.join(__dirname, "fa-removeColor-test.log");
const testFileAsIs = path.join(__dirname, "fa-asIs-test.log");
const logger = log4js.getLogger("default-settings");
await removeFile(testFilePlain);
await removeFile(testFileAsIs);
t.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
await removeFile(testFilePlain);
await removeFile(testFileAsIs);
});
log4js.configure({
appenders: {
plainFile: { type: "file", filename: testFilePlain, removeColor: true },
asIsFile: { type: "file", filename: testFileAsIs, removeColor: false }
},
categories: { default: { appenders: ["plainFile", "asIsFile"], level: "debug" } }
});
logger.info("This should be in the file.",
"\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m.", {}, []);
await sleep(100);
let fileContents = await fs.readFile(testFilePlain, "utf8");
t.match(fileContents, `This should be in the file. Color should be plain. {} []${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
fileContents = await fs.readFile(testFileAsIs, "utf8");
t.match(fileContents, "This should be in the file.",
`\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m. {} []${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./lib/appenders/multiFile.js |
const debug = require('debug')('log4js:multiFile');
const path = require('path');
const fileAppender = require('./file');
const findFileKey = (property, event) => event[property] || event.context[property];
module.exports.configure = (config, layouts) => {
debug('Creating a multi-file appender');
const files = new Map();
const timers = new Map();
function checkForTimeout(fileKey) {
const timer = timers.get(fileKey);
const app = files.get(fileKey);
/* istanbul ignore else: failsafe */
if (timer && app) {
if (Date.now() - timer.lastUsed > timer.timeout) {
debug('%s not used for > %d ms => close', fileKey, timer.timeout);
clearInterval(timer.interval);
timers.delete(fileKey);
files.delete(fileKey);
app.shutdown((err) => {
if (err) {
debug('ignore error on file shutdown: %s', err.message);
}
});
}
} else {
// will never get here as files and timers are coupled to be added and deleted at same place
debug('timer or app does not exist');
}
}
const appender = (logEvent) => {
const fileKey = findFileKey(config.property, logEvent);
debug('fileKey for property ', config.property, ' is ', fileKey);
if (fileKey) {
let file = files.get(fileKey);
debug('existing file appender is ', file);
if (!file) {
debug('creating new file appender');
config.filename = path.join(config.base, fileKey + config.extension);
file = fileAppender.configure(config, layouts);
files.set(fileKey, file);
if (config.timeout) {
debug('creating new timer');
timers.set(fileKey, {
timeout: config.timeout,
lastUsed: Date.now(),
interval: setInterval(checkForTimeout.bind(null, fileKey), config.timeout)
});
}
} else if (config.timeout) {
debug('%s extending activity', fileKey);
timers.get(fileKey).lastUsed = Date.now();
}
file(logEvent);
} else {
debug('No fileKey for logEvent, quietly ignoring this log event');
}
};
appender.shutdown = (cb) => {
let shutdownFunctions = files.size;
if (shutdownFunctions <= 0) {
cb();
}
let error;
timers.forEach((timer, fileKey) => {
debug('clearing timer for ', fileKey);
clearInterval(timer.interval);
});
files.forEach((app, fileKey) => {
debug('calling shutdown for ', fileKey);
app.shutdown((err) => {
error = error || err;
shutdownFunctions -= 1;
if (shutdownFunctions <= 0) {
cb(error);
}
});
});
};
return appender;
};
|
const debug = require('debug')('log4js:multiFile');
const path = require('path');
const fileAppender = require('./file');
const findFileKey = (property, event) => event[property] || event.context[property];
module.exports.configure = (config, layouts) => {
debug('Creating a multi-file appender');
const files = new Map();
const timers = new Map();
function checkForTimeout(fileKey) {
const timer = timers.get(fileKey);
const app = files.get(fileKey);
/* istanbul ignore else: failsafe */
if (timer && app) {
if (Date.now() - timer.lastUsed > timer.timeout) {
debug('%s not used for > %d ms => close', fileKey, timer.timeout);
clearInterval(timer.interval);
timers.delete(fileKey);
files.delete(fileKey);
app.shutdown((err) => {
if (err) {
debug('ignore error on file shutdown: %s', err.message);
}
});
}
} else {
// will never get here as files and timers are coupled to be added and deleted at same place
debug('timer or app does not exist');
}
}
const appender = (logEvent) => {
const fileKey = findFileKey(config.property, logEvent);
debug('fileKey for property ', config.property, ' is ', fileKey);
if (fileKey) {
let file = files.get(fileKey);
debug('existing file appender is ', file);
if (!file) {
debug('creating new file appender');
config.filename = path.join(config.base, fileKey + config.extension);
file = fileAppender.configure(config, layouts);
files.set(fileKey, file);
if (config.timeout) {
debug('creating new timer');
timers.set(fileKey, {
timeout: config.timeout,
lastUsed: Date.now(),
interval: setInterval(checkForTimeout.bind(null, fileKey), config.timeout)
});
}
} else if (config.timeout) {
debug('%s extending activity', fileKey);
timers.get(fileKey).lastUsed = Date.now();
}
file(logEvent);
} else {
debug('No fileKey for logEvent, quietly ignoring this log event');
}
};
appender.shutdown = (cb) => {
let shutdownFunctions = files.size;
if (shutdownFunctions <= 0) {
cb();
}
let error;
timers.forEach((timer, fileKey) => {
debug('clearing timer for ', fileKey);
clearInterval(timer.interval);
});
files.forEach((app, fileKey) => {
debug('calling shutdown for ', fileKey);
app.shutdown((err) => {
error = error || err;
shutdownFunctions -= 1;
if (shutdownFunctions <= 0) {
cb(error);
}
});
});
};
return appender;
};
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/layouts-test.js | const { test } = require("tap");
const debug = require("debug");
const os = require("os");
const path = require("path");
const { EOL } = os;
// used for patternLayout tests.
function testPattern(assert, layout, event, tokens, pattern, value) {
assert.equal(layout(pattern, tokens)(event), value);
}
test("log4js layouts", batch => {
batch.test("colouredLayout", t => {
const layout = require("../../lib/layouts").colouredLayout;
t.test("should apply level colour codes to output", assert => {
const output = layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString() {
return "ERROR";
},
colour: "red"
}
});
assert.equal(
output,
"\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense"
);
assert.end();
});
t.test("should support the console.log format for the message", assert => {
const output = layout({
data: ["thing %d", 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString() {
return "ERROR";
},
colour: "red"
}
});
assert.equal(
output,
"\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2"
);
assert.end();
});
t.end();
});
batch.test("messagePassThroughLayout", t => {
const layout = require("../../lib/layouts").messagePassThroughLayout;
t.equal(
layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"nonsense",
"should take a logevent and output only the message"
);
t.equal(
layout({
data: ["thing %d", 1, "cheese"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"thing 1 cheese",
"should support the console.log format for the message"
);
t.equal(
layout({
data: [{ thing: 1 }],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"{ thing: 1 }",
"should output the first item even if it is not a string"
);
t.match(
layout({
data: [new Error()],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
/at (Test\.batch\.test\.t|Test\.<anonymous>)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
"regexp did not return a match - should print the stacks of a passed error objects"
);
t.test("with passed augmented errors", assert => {
const e = new Error("My Unique Error Message");
e.augmented = "My Unique attribute value";
e.augObj = { at1: "at2" };
const layoutOutput = layout({
data: [e],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
});
assert.match(
layoutOutput,
/Error: My Unique Error Message/,
"should print the contained error message"
);
assert.match(
layoutOutput,
/augmented:\s'My Unique attribute value'/,
"should print error augmented string attributes"
);
assert.match(
layoutOutput,
/augObj:\s\{ at1: 'at2' \}/,
"should print error augmented object attributes"
);
assert.end();
});
t.end();
});
batch.test("basicLayout", t => {
const layout = require("../../lib/layouts").basicLayout;
const event = {
data: ["this is a test"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "tests",
level: {
toString() {
return "DEBUG";
}
}
};
t.equal(
layout(event),
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test"
);
t.test(
"should output a stacktrace, message if the event has an error attached",
assert => {
let i;
const error = new Error("Some made-up error");
const stack = error.stack.split(/\n/);
event.data = ["this is a test", error];
const output = layout(event);
const lines = output.split(/\n/);
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error"
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
}
assert.end();
}
);
t.test(
"should output any extra data in the log event as util.inspect strings",
assert => {
event.data = [
"this is a test",
{
name: "Cheese",
message: "Gorgonzola smells."
}
];
const output = layout(event);
assert.equal(
output,
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test " +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
}
);
t.end();
});
batch.test("dummyLayout", t => {
const layout = require("../../lib/layouts").dummyLayout;
t.test("should output just the first element of the log data", assert => {
const event = {
data: ["this is the first value", "this is not"],
startTime: new Date("2010-12-05 14:18:30.045"),
categoryName: "multiple.levels.of.tests",
level: {
toString() {
return "DEBUG";
},
colour: "cyan"
}
};
assert.equal(layout(event), "this is the first value");
assert.end();
});
t.end();
});
batch.test("patternLayout", t => {
const originalListener = process.listeners("warning")[process.listeners("warning").length - 1];
const warningListener = error => {
if (error.name === "DeprecationWarning") {
if (error.code.startsWith("log4js-node-DEP0003") || error.code.startsWith("log4js-node-DEP0004")) {
return;
}
}
originalListener(error);
};
process.off("warning", originalListener);
process.on("warning", warningListener);
const debugWasEnabled = debug.enabled("log4js:layouts");
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
const originalNamespace = debug.disable();
debug.enable(`${originalNamespace}, log4js:layouts`);
batch.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off("warning", warningListener);
process.on("warning", originalListener);
});
process.stderr.write = originalWrite;
debug.enable(originalNamespace);
});
const tokens = {
testString: "testStringToken",
testFunction() {
return "testFunctionToken";
},
fnThatUsesLogEvent(logEvent) {
return logEvent.level.toString();
}
};
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
" at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)"; // eslint-disable-line max-len
const fileName = path.normalize("/log4js-node/test/tap/layouts-test.js");
const lineNumber = 1;
const columnNumber = 14;
const event = {
data: ["this is a test"],
startTime: new Date("2010-12-05 14:18:30.045"),
categoryName: "multiple.levels.of.tests",
level: {
toString() {
return "DEBUG";
},
colour: "cyan"
},
context: tokens,
// location
callStack,
fileName,
lineNumber,
columnNumber
};
event.startTime.getTimezoneOffset = () => -600;
const layout = require("../../lib/layouts").patternLayout;
t.test('should default to "time logLevel loggerName - message"', assert => {
testPattern(
assert,
layout,
event,
tokens,
null,
`14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
);
assert.end();
});
t.test("%r should output time only", assert => {
testPattern(assert, layout, event, tokens, "%r", "14:18:30");
assert.end();
});
t.test("%p should output the log level", assert => {
testPattern(assert, layout, event, tokens, "%p", "DEBUG");
assert.end();
});
t.test("%c should output the log category", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%c",
"multiple.levels.of.tests"
);
assert.end();
});
t.test("%m should output the log data", assert => {
testPattern(assert, layout, event, tokens, "%m", "this is a test");
assert.end();
});
t.test("%n should output a new line", assert => {
testPattern(assert, layout, event, tokens, "%n", EOL);
assert.end();
});
t.test("%h should output hostname", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%h",
os.hostname().toString()
);
assert.end();
});
t.test("%z should output pid", assert => {
testPattern(assert, layout, event, tokens, "%z", process.pid.toString());
assert.end();
});
t.test("%z should pick up pid from log event if present", assert => {
event.pid = "1234";
testPattern(assert, layout, event, tokens, "%z", "1234");
delete event.pid;
assert.end();
});
t.test("%y should output pid (was cluster info)", assert => {
testPattern(assert, layout, event, tokens, "%y", process.pid.toString());
assert.end();
});
t.test(
"%c should handle category names like java-style package names",
assert => {
testPattern(assert, layout, event, tokens, "%c{1}", "tests");
testPattern(assert, layout, event, tokens, "%c{2}", "of.tests");
testPattern(assert, layout, event, tokens, "%c{3}", "levels.of.tests");
testPattern(
assert,
layout,
event,
tokens,
"%c{4}",
"multiple.levels.of.tests"
);
testPattern(
assert,
layout,
event,
tokens,
"%c{5}",
"multiple.levels.of.tests"
);
testPattern(
assert,
layout,
event,
tokens,
"%c{99}",
"multiple.levels.of.tests"
);
assert.end();
}
);
t.test("%d should output the date in ISO8601 format", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%d",
"2010-12-05T14:18:30.045"
);
assert.end();
});
t.test("%d should allow for format specification", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%d{ISO8601}",
"2010-12-05T14:18:30.045"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{ISO8601_WITH_TZ_OFFSET}",
"2010-12-05T14:18:30.045+10:00"
);
const DEP0003 = debugLogs.filter((e) => e.indexOf("log4js-node-DEP0003") > -1).length;
testPattern(
assert,
layout,
event,
tokens,
"%d{ABSOLUTE}", // deprecated
"14:18:30.045"
);
assert.equal(
debugLogs.filter((e) => e.indexOf("log4js-node-DEP0003") > -1).length,
DEP0003 + 1,
"deprecation log4js-node-DEP0003 emitted"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{ABSOLUTETIME}",
"14:18:30.045"
);
const DEP0004 = debugLogs.filter((e) => e.indexOf("log4js-node-DEP0004") > -1).length;
testPattern(
assert,
layout,
event,
tokens,
"%d{DATE}", // deprecated
"05 12 2010 14:18:30.045"
);
assert.equal(
debugLogs.filter((e) => e.indexOf("log4js-node-DEP0004") > -1).length,
DEP0004 + 1,
"deprecation log4js-node-DEP0004 emitted"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{DATETIME}",
"05 12 2010 14:18:30.045"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yy MM dd hh mm ss}",
"10 12 05 14 18 30"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yyyy MM dd}",
"2010 12 05"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yyyy MM dd hh mm ss SSS}",
"2010 12 05 14 18 30 045"
);
assert.end();
});
t.test("%% should output %", assert => {
testPattern(assert, layout, event, tokens, "%%", "%");
assert.end();
});
t.test("%f should output filename", assert => {
testPattern(assert, layout, event, tokens, "%f", fileName);
assert.end();
});
t.test("%f should handle filename depth", assert => {
testPattern(assert, layout, event, tokens, "%f{1}", "layouts-test.js");
testPattern(
assert,
layout,
event,
tokens,
"%f{2}",
path.join("tap", "layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{3}",
path.join("test", "tap", "layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{4}",
path.join("log4js-node","test","tap","layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{5}",
path.join("/log4js-node","test","tap","layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{99}",
path.join("/log4js-node","test","tap","layouts-test.js")
);
assert.end();
});
t.test("%f should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%.5f", fileName.slice(0, 5));
testPattern(assert, layout, event, tokens, "%20f{1}", " layouts-test.js");
testPattern(assert, layout, event, tokens, "%30.30f{2}", ` ${ path.join("tap","layouts-test.js")}`);
testPattern(assert, layout, event, tokens, "%10.-5f{1}", " st.js");
assert.end();
});
t.test("%l should output line number", assert => {
testPattern(assert, layout, event, tokens, "%l", lineNumber.toString());
assert.end();
});
t.test("%l should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%5.10l", " 1");
testPattern(assert, layout, event, tokens, "%.5l", "1");
testPattern(assert, layout, event, tokens, "%.-5l", "1");
testPattern(assert, layout, event, tokens, "%-5l", "1 ");
assert.end();
});
t.test("%o should output column postion", assert => {
testPattern(assert, layout, event, tokens, "%o", columnNumber.toString());
assert.end();
});
t.test("%o should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%5.10o", " 14");
testPattern(assert, layout, event, tokens, "%.5o", "14");
testPattern(assert, layout, event, tokens, "%.1o", "1");
testPattern(assert, layout, event, tokens, "%.-1o", "4");
testPattern(assert, layout, event, tokens, "%-5o", "14 ");
assert.end();
});
t.test("%s should output stack", assert => {
testPattern(assert, layout, event, tokens, "%s", callStack);
assert.end();
});
t.test("%f should output empty string when fileName not exist", assert => {
delete event.fileName;
testPattern(assert, layout, event, tokens, "%f", "");
assert.end();
});
t.test(
"%l should output empty string when lineNumber not exist",
assert => {
delete event.lineNumber;
testPattern(assert, layout, event, tokens, "%l", "");
assert.end();
}
);
t.test(
"%o should output empty string when columnNumber not exist",
assert => {
delete event.columnNumber;
testPattern(assert, layout, event, tokens, "%o", "");
assert.end();
}
);
t.test("%s should output empty string when callStack not exist", assert => {
delete event.callStack;
testPattern(assert, layout, event, tokens, "%s", "");
assert.end();
});
t.test("should output anything not preceded by % as literal", assert => {
testPattern(
assert,
layout,
event,
tokens,
"blah blah blah",
"blah blah blah"
);
assert.end();
});
t.test(
"should output the original string if no replacer matches the token",
assert => {
testPattern(assert, layout, event, tokens, "%a{3}", "a{3}");
assert.end();
}
);
t.test("should handle complicated patterns", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%m%n %c{2} at %d{ABSOLUTE} cheese %p%n", // deprecated
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
testPattern(
assert,
layout,
event,
tokens,
"%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n",
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
assert.end();
});
t.test("should truncate fields if specified", assert => {
testPattern(assert, layout, event, tokens, "%.4m", "this");
testPattern(assert, layout, event, tokens, "%.7m", "this is");
testPattern(assert, layout, event, tokens, "%.9m", "this is a");
testPattern(assert, layout, event, tokens, "%.14m", "this is a test");
testPattern(
assert,
layout,
event,
tokens,
"%.2919102m",
"this is a test"
);
testPattern(assert, layout, event, tokens, "%.-4m", "test");
assert.end();
});
t.test("should pad fields if specified", assert => {
testPattern(assert, layout, event, tokens, "%10p", " DEBUG");
testPattern(assert, layout, event, tokens, "%8p", " DEBUG");
testPattern(assert, layout, event, tokens, "%6p", " DEBUG");
testPattern(assert, layout, event, tokens, "%4p", "DEBUG");
testPattern(assert, layout, event, tokens, "%-4p", "DEBUG");
testPattern(assert, layout, event, tokens, "%-6p", "DEBUG ");
testPattern(assert, layout, event, tokens, "%-8p", "DEBUG ");
testPattern(assert, layout, event, tokens, "%-10p", "DEBUG ");
assert.end();
});
t.test("%[%r%] should output colored time", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%[%r%]",
"\x1B[36m14:18:30\x1B[39m"
);
assert.end();
});
t.test(
"%x{testString} should output the string stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{testString}",
"testStringToken"
);
assert.end();
}
);
t.test(
"%x{testFunction} should output the result of the function stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{testFunction}",
"testFunctionToken"
);
assert.end();
}
);
t.test(
"%x{doesNotExist} should output the string stored in tokens",
assert => {
testPattern(assert, layout, event, tokens, "%x{doesNotExist}", "null");
assert.end();
}
);
t.test(
"%x{fnThatUsesLogEvent} should be able to use the logEvent",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{fnThatUsesLogEvent}",
"DEBUG"
);
assert.end();
}
);
t.test("%x should output the string stored in tokens", assert => {
testPattern(assert, layout, event, tokens, "%x", "null");
assert.end();
});
t.test(
"%X{testString} should output the string stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{testString}",
"testStringToken"
);
assert.end();
}
);
t.test(
"%X{testFunction} should output the result of the function stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{testFunction}",
"testFunctionToken"
);
assert.end();
}
);
t.test(
"%X{doesNotExist} should output the string stored in tokens",
assert => {
testPattern(assert, layout, event, {}, "%X{doesNotExist}", "null");
assert.end();
}
);
t.test(
"%X{fnThatUsesLogEvent} should be able to use the logEvent",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{fnThatUsesLogEvent}",
"DEBUG"
);
assert.end();
}
);
t.test("%X should output the string stored in tokens", assert => {
testPattern(assert, layout, event, {}, "%X", "null");
assert.end();
});
t.end();
});
batch.test("layout makers", t => {
const layouts = require("../../lib/layouts");
t.test("should have a maker for each layout", assert => {
assert.ok(layouts.layout("messagePassThrough"));
assert.ok(layouts.layout("basic"));
assert.ok(layouts.layout("colored"));
assert.ok(layouts.layout("coloured"));
assert.ok(layouts.layout("pattern"));
assert.ok(layouts.layout("dummy"));
assert.end();
});
t.test(
"layout pattern maker should pass pattern and tokens to layout from config",
assert => {
let layout = layouts.layout("pattern", { pattern: "%%" });
assert.equal(layout({}), "%");
layout = layouts.layout("pattern", {
pattern: "%x{testStringToken}",
tokens: { testStringToken: "cheese" }
});
assert.equal(layout({}), "cheese");
assert.end();
}
);
t.end();
});
batch.test("add layout", t => {
const layouts = require("../../lib/layouts");
t.test("should be able to add a layout", assert => {
layouts.addLayout("test_layout", config => {
assert.equal(config, "test_config");
return function(logEvent) {
return `TEST LAYOUT >${logEvent.data}`;
};
});
const serializer = layouts.layout("test_layout", "test_config");
assert.ok(serializer);
assert.equal(serializer({ data: "INPUT" }), "TEST LAYOUT >INPUT");
assert.end();
});
t.end();
});
batch.end();
});
| const { test } = require("tap");
const debug = require("debug");
const os = require("os");
const path = require("path");
const { EOL } = os;
// used for patternLayout tests.
function testPattern(assert, layout, event, tokens, pattern, value) {
assert.equal(layout(pattern, tokens)(event), value);
}
test("log4js layouts", batch => {
batch.test("colouredLayout", t => {
const layout = require("../../lib/layouts").colouredLayout;
t.test("should apply level colour codes to output", assert => {
const output = layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString() {
return "ERROR";
},
colour: "red"
}
});
assert.equal(
output,
"\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense"
);
assert.end();
});
t.test("should support the console.log format for the message", assert => {
const output = layout({
data: ["thing %d", 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString() {
return "ERROR";
},
colour: "red"
}
});
assert.equal(
output,
"\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2"
);
assert.end();
});
t.end();
});
batch.test("messagePassThroughLayout", t => {
const layout = require("../../lib/layouts").messagePassThroughLayout;
t.equal(
layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"nonsense",
"should take a logevent and output only the message"
);
t.equal(
layout({
data: ["thing %d", 1, "cheese"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"thing 1 cheese",
"should support the console.log format for the message"
);
t.equal(
layout({
data: [{ thing: 1 }],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"{ thing: 1 }",
"should output the first item even if it is not a string"
);
t.match(
layout({
data: [new Error()],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
/at (Test\.batch\.test\.t|Test\.<anonymous>)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
"regexp did not return a match - should print the stacks of a passed error objects"
);
t.test("with passed augmented errors", assert => {
const e = new Error("My Unique Error Message");
e.augmented = "My Unique attribute value";
e.augObj = { at1: "at2" };
const layoutOutput = layout({
data: [e],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
});
assert.match(
layoutOutput,
/Error: My Unique Error Message/,
"should print the contained error message"
);
assert.match(
layoutOutput,
/augmented:\s'My Unique attribute value'/,
"should print error augmented string attributes"
);
assert.match(
layoutOutput,
/augObj:\s\{ at1: 'at2' \}/,
"should print error augmented object attributes"
);
assert.end();
});
t.end();
});
batch.test("basicLayout", t => {
const layout = require("../../lib/layouts").basicLayout;
const event = {
data: ["this is a test"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "tests",
level: {
toString() {
return "DEBUG";
}
}
};
t.equal(
layout(event),
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test"
);
t.test(
"should output a stacktrace, message if the event has an error attached",
assert => {
let i;
const error = new Error("Some made-up error");
const stack = error.stack.split(/\n/);
event.data = ["this is a test", error];
const output = layout(event);
const lines = output.split(/\n/);
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error"
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
}
assert.end();
}
);
t.test(
"should output any extra data in the log event as util.inspect strings",
assert => {
event.data = [
"this is a test",
{
name: "Cheese",
message: "Gorgonzola smells."
}
];
const output = layout(event);
assert.equal(
output,
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test " +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
}
);
t.end();
});
batch.test("dummyLayout", t => {
const layout = require("../../lib/layouts").dummyLayout;
t.test("should output just the first element of the log data", assert => {
const event = {
data: ["this is the first value", "this is not"],
startTime: new Date("2010-12-05 14:18:30.045"),
categoryName: "multiple.levels.of.tests",
level: {
toString() {
return "DEBUG";
},
colour: "cyan"
}
};
assert.equal(layout(event), "this is the first value");
assert.end();
});
t.end();
});
batch.test("patternLayout", t => {
const originalListener = process.listeners("warning")[process.listeners("warning").length - 1];
const warningListener = error => {
if (error.name === "DeprecationWarning") {
if (error.code.startsWith("log4js-node-DEP0003") || error.code.startsWith("log4js-node-DEP0004")) {
return;
}
}
originalListener(error);
};
process.off("warning", originalListener);
process.on("warning", warningListener);
const debugWasEnabled = debug.enabled("log4js:layouts");
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
const originalNamespace = debug.disable();
debug.enable(`${originalNamespace}, log4js:layouts`);
batch.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off("warning", warningListener);
process.on("warning", originalListener);
});
process.stderr.write = originalWrite;
debug.enable(originalNamespace);
});
const tokens = {
testString: "testStringToken",
testFunction() {
return "testFunctionToken";
},
fnThatUsesLogEvent(logEvent) {
return logEvent.level.toString();
}
};
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
" at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)"; // eslint-disable-line max-len
const fileName = path.normalize("/log4js-node/test/tap/layouts-test.js");
const lineNumber = 1;
const columnNumber = 14;
const event = {
data: ["this is a test"],
startTime: new Date("2010-12-05 14:18:30.045"),
categoryName: "multiple.levels.of.tests",
level: {
toString() {
return "DEBUG";
},
colour: "cyan"
},
context: tokens,
// location
callStack,
fileName,
lineNumber,
columnNumber
};
event.startTime.getTimezoneOffset = () => -600;
const layout = require("../../lib/layouts").patternLayout;
t.test('should default to "time logLevel loggerName - message"', assert => {
testPattern(
assert,
layout,
event,
tokens,
null,
`14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
);
assert.end();
});
t.test("%r should output time only", assert => {
testPattern(assert, layout, event, tokens, "%r", "14:18:30");
assert.end();
});
t.test("%p should output the log level", assert => {
testPattern(assert, layout, event, tokens, "%p", "DEBUG");
assert.end();
});
t.test("%c should output the log category", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%c",
"multiple.levels.of.tests"
);
assert.end();
});
t.test("%m should output the log data", assert => {
testPattern(assert, layout, event, tokens, "%m", "this is a test");
assert.end();
});
t.test("%n should output a new line", assert => {
testPattern(assert, layout, event, tokens, "%n", EOL);
assert.end();
});
t.test("%h should output hostname", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%h",
os.hostname().toString()
);
assert.end();
});
t.test("%z should output pid", assert => {
testPattern(assert, layout, event, tokens, "%z", process.pid.toString());
assert.end();
});
t.test("%z should pick up pid from log event if present", assert => {
event.pid = "1234";
testPattern(assert, layout, event, tokens, "%z", "1234");
delete event.pid;
assert.end();
});
t.test("%y should output pid (was cluster info)", assert => {
testPattern(assert, layout, event, tokens, "%y", process.pid.toString());
assert.end();
});
t.test(
"%c should handle category names like java-style package names",
assert => {
testPattern(assert, layout, event, tokens, "%c{1}", "tests");
testPattern(assert, layout, event, tokens, "%c{2}", "of.tests");
testPattern(assert, layout, event, tokens, "%c{3}", "levels.of.tests");
testPattern(
assert,
layout,
event,
tokens,
"%c{4}",
"multiple.levels.of.tests"
);
testPattern(
assert,
layout,
event,
tokens,
"%c{5}",
"multiple.levels.of.tests"
);
testPattern(
assert,
layout,
event,
tokens,
"%c{99}",
"multiple.levels.of.tests"
);
assert.end();
}
);
t.test("%d should output the date in ISO8601 format", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%d",
"2010-12-05T14:18:30.045"
);
assert.end();
});
t.test("%d should allow for format specification", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%d{ISO8601}",
"2010-12-05T14:18:30.045"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{ISO8601_WITH_TZ_OFFSET}",
"2010-12-05T14:18:30.045+10:00"
);
const DEP0003 = debugLogs.filter((e) => e.indexOf("log4js-node-DEP0003") > -1).length;
testPattern(
assert,
layout,
event,
tokens,
"%d{ABSOLUTE}", // deprecated
"14:18:30.045"
);
assert.equal(
debugLogs.filter((e) => e.indexOf("log4js-node-DEP0003") > -1).length,
DEP0003 + 1,
"deprecation log4js-node-DEP0003 emitted"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{ABSOLUTETIME}",
"14:18:30.045"
);
const DEP0004 = debugLogs.filter((e) => e.indexOf("log4js-node-DEP0004") > -1).length;
testPattern(
assert,
layout,
event,
tokens,
"%d{DATE}", // deprecated
"05 12 2010 14:18:30.045"
);
assert.equal(
debugLogs.filter((e) => e.indexOf("log4js-node-DEP0004") > -1).length,
DEP0004 + 1,
"deprecation log4js-node-DEP0004 emitted"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{DATETIME}",
"05 12 2010 14:18:30.045"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yy MM dd hh mm ss}",
"10 12 05 14 18 30"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yyyy MM dd}",
"2010 12 05"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yyyy MM dd hh mm ss SSS}",
"2010 12 05 14 18 30 045"
);
assert.end();
});
t.test("%% should output %", assert => {
testPattern(assert, layout, event, tokens, "%%", "%");
assert.end();
});
t.test("%f should output filename", assert => {
testPattern(assert, layout, event, tokens, "%f", fileName);
assert.end();
});
t.test("%f should handle filename depth", assert => {
testPattern(assert, layout, event, tokens, "%f{1}", "layouts-test.js");
testPattern(
assert,
layout,
event,
tokens,
"%f{2}",
path.join("tap", "layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{3}",
path.join("test", "tap", "layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{4}",
path.join("log4js-node","test","tap","layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{5}",
path.join("/log4js-node","test","tap","layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{99}",
path.join("/log4js-node","test","tap","layouts-test.js")
);
assert.end();
});
t.test("%f should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%.5f", fileName.slice(0, 5));
testPattern(assert, layout, event, tokens, "%20f{1}", " layouts-test.js");
testPattern(assert, layout, event, tokens, "%30.30f{2}", ` ${ path.join("tap","layouts-test.js")}`);
testPattern(assert, layout, event, tokens, "%10.-5f{1}", " st.js");
assert.end();
});
t.test("%l should output line number", assert => {
testPattern(assert, layout, event, tokens, "%l", lineNumber.toString());
assert.end();
});
t.test("%l should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%5.10l", " 1");
testPattern(assert, layout, event, tokens, "%.5l", "1");
testPattern(assert, layout, event, tokens, "%.-5l", "1");
testPattern(assert, layout, event, tokens, "%-5l", "1 ");
assert.end();
});
t.test("%o should output column postion", assert => {
testPattern(assert, layout, event, tokens, "%o", columnNumber.toString());
assert.end();
});
t.test("%o should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%5.10o", " 14");
testPattern(assert, layout, event, tokens, "%.5o", "14");
testPattern(assert, layout, event, tokens, "%.1o", "1");
testPattern(assert, layout, event, tokens, "%.-1o", "4");
testPattern(assert, layout, event, tokens, "%-5o", "14 ");
assert.end();
});
t.test("%s should output stack", assert => {
testPattern(assert, layout, event, tokens, "%s", callStack);
assert.end();
});
t.test("%f should output empty string when fileName not exist", assert => {
delete event.fileName;
testPattern(assert, layout, event, tokens, "%f", "");
assert.end();
});
t.test(
"%l should output empty string when lineNumber not exist",
assert => {
delete event.lineNumber;
testPattern(assert, layout, event, tokens, "%l", "");
assert.end();
}
);
t.test(
"%o should output empty string when columnNumber not exist",
assert => {
delete event.columnNumber;
testPattern(assert, layout, event, tokens, "%o", "");
assert.end();
}
);
t.test("%s should output empty string when callStack not exist", assert => {
delete event.callStack;
testPattern(assert, layout, event, tokens, "%s", "");
assert.end();
});
t.test("should output anything not preceded by % as literal", assert => {
testPattern(
assert,
layout,
event,
tokens,
"blah blah blah",
"blah blah blah"
);
assert.end();
});
t.test(
"should output the original string if no replacer matches the token",
assert => {
testPattern(assert, layout, event, tokens, "%a{3}", "a{3}");
assert.end();
}
);
t.test("should handle complicated patterns", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%m%n %c{2} at %d{ABSOLUTE} cheese %p%n", // deprecated
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
testPattern(
assert,
layout,
event,
tokens,
"%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n",
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
assert.end();
});
t.test("should truncate fields if specified", assert => {
testPattern(assert, layout, event, tokens, "%.4m", "this");
testPattern(assert, layout, event, tokens, "%.7m", "this is");
testPattern(assert, layout, event, tokens, "%.9m", "this is a");
testPattern(assert, layout, event, tokens, "%.14m", "this is a test");
testPattern(
assert,
layout,
event,
tokens,
"%.2919102m",
"this is a test"
);
testPattern(assert, layout, event, tokens, "%.-4m", "test");
assert.end();
});
t.test("should pad fields if specified", assert => {
testPattern(assert, layout, event, tokens, "%10p", " DEBUG");
testPattern(assert, layout, event, tokens, "%8p", " DEBUG");
testPattern(assert, layout, event, tokens, "%6p", " DEBUG");
testPattern(assert, layout, event, tokens, "%4p", "DEBUG");
testPattern(assert, layout, event, tokens, "%-4p", "DEBUG");
testPattern(assert, layout, event, tokens, "%-6p", "DEBUG ");
testPattern(assert, layout, event, tokens, "%-8p", "DEBUG ");
testPattern(assert, layout, event, tokens, "%-10p", "DEBUG ");
assert.end();
});
t.test("%[%r%] should output colored time", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%[%r%]",
"\x1B[36m14:18:30\x1B[39m"
);
assert.end();
});
t.test(
"%x{testString} should output the string stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{testString}",
"testStringToken"
);
assert.end();
}
);
t.test(
"%x{testFunction} should output the result of the function stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{testFunction}",
"testFunctionToken"
);
assert.end();
}
);
t.test(
"%x{doesNotExist} should output the string stored in tokens",
assert => {
testPattern(assert, layout, event, tokens, "%x{doesNotExist}", "null");
assert.end();
}
);
t.test(
"%x{fnThatUsesLogEvent} should be able to use the logEvent",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{fnThatUsesLogEvent}",
"DEBUG"
);
assert.end();
}
);
t.test("%x should output the string stored in tokens", assert => {
testPattern(assert, layout, event, tokens, "%x", "null");
assert.end();
});
t.test(
"%X{testString} should output the string stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{testString}",
"testStringToken"
);
assert.end();
}
);
t.test(
"%X{testFunction} should output the result of the function stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{testFunction}",
"testFunctionToken"
);
assert.end();
}
);
t.test(
"%X{doesNotExist} should output the string stored in tokens",
assert => {
testPattern(assert, layout, event, {}, "%X{doesNotExist}", "null");
assert.end();
}
);
t.test(
"%X{fnThatUsesLogEvent} should be able to use the logEvent",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{fnThatUsesLogEvent}",
"DEBUG"
);
assert.end();
}
);
t.test("%X should output the string stored in tokens", assert => {
testPattern(assert, layout, event, {}, "%X", "null");
assert.end();
});
t.end();
});
batch.test("layout makers", t => {
const layouts = require("../../lib/layouts");
t.test("should have a maker for each layout", assert => {
assert.ok(layouts.layout("messagePassThrough"));
assert.ok(layouts.layout("basic"));
assert.ok(layouts.layout("colored"));
assert.ok(layouts.layout("coloured"));
assert.ok(layouts.layout("pattern"));
assert.ok(layouts.layout("dummy"));
assert.end();
});
t.test(
"layout pattern maker should pass pattern and tokens to layout from config",
assert => {
let layout = layouts.layout("pattern", { pattern: "%%" });
assert.equal(layout({}), "%");
layout = layouts.layout("pattern", {
pattern: "%x{testStringToken}",
tokens: { testStringToken: "cheese" }
});
assert.equal(layout({}), "cheese");
assert.end();
}
);
t.end();
});
batch.test("add layout", t => {
const layouts = require("../../lib/layouts");
t.test("should be able to add a layout", assert => {
layouts.addLayout("test_layout", config => {
assert.equal(config, "test_config");
return function(logEvent) {
return `TEST LAYOUT >${logEvent.data}`;
};
});
const serializer = layouts.layout("test_layout", "test_config");
assert.ok(serializer);
assert.equal(serializer({ data: "INPUT" }), "TEST LAYOUT >INPUT");
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/cluster.js | 'use strict';
const cluster = require('cluster');
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: { type: 'stdout' }
},
categories: { default: { appenders: ['out'], level: 'debug' } }
});
let logger;
if (cluster.isMaster) {
logger = log4js.getLogger('master');
cluster.fork();
logger.info('master is done', process.pid, new Error('flaps'));
} else {
logger = log4js.getLogger('worker');
logger.info("I'm a worker, with pid ", process.pid, new Error('pants'));
logger.info("I'm a worker, with pid ", process.pid, new Error());
logger.info('cluster.worker ', cluster.worker);
cluster.worker.disconnect();
}
| 'use strict';
const cluster = require('cluster');
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: { type: 'stdout' }
},
categories: { default: { appenders: ['out'], level: 'debug' } }
});
let logger;
if (cluster.isMaster) {
logger = log4js.getLogger('master');
cluster.fork();
logger.info('master is done', process.pid, new Error('flaps'));
} else {
logger = log4js.getLogger('worker');
logger.info("I'm a worker, with pid ", process.pid, new Error('pants'));
logger.info("I'm a worker, with pid ", process.pid, new Error());
logger.info('cluster.worker ', cluster.worker);
cluster.worker.disconnect();
}
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/date-file-rolling.js | 'use strict';
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
file: {
type: 'dateFile', filename: 'thing.log', numBackups: 3, pattern: '.mm'
}
},
categories: {
default: { appenders: ['file'], level: 'debug' }
}
});
const logger = log4js.getLogger('thing');
setInterval(() => {
logger.info('just doing the thing');
}, 1000);
| 'use strict';
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
file: {
type: 'dateFile', filename: 'thing.log', numBackups: 3, pattern: '.mm'
}
},
categories: {
default: { appenders: ['file'], level: 'debug' }
}
});
const logger = log4js.getLogger('thing');
setInterval(() => {
logger.info('just doing the thing');
}, 1000);
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/cluster-test.js | const { test } = require("tap");
const cluster = require("cluster");
const log4js = require("../../lib/log4js");
const recorder = require("../../lib/appenders/recording");
log4js.configure({
appenders: {
vcr: { type: "recording" }
},
categories: { default: { appenders: ["vcr"], level: "debug" } }
});
if (cluster.isMaster) {
cluster.fork();
const masterLogger = log4js.getLogger("master");
const masterPid = process.pid;
masterLogger.info("this is master");
let workerLevel;
cluster.on("message", (worker, message) => {
if (worker.type || worker.topic) {
message = worker;
}
if (message.type && message.type === "::testing") {
workerLevel = message.level;
}
});
cluster.on("exit", worker => {
const workerPid = worker.process.pid;
const logEvents = recorder.replay();
test("cluster master", batch => {
batch.test("events should be logged", t => {
t.equal(logEvents.length, 3);
t.equal(logEvents[0].categoryName, "master");
t.equal(logEvents[0].pid, masterPid);
t.equal(logEvents[1].categoryName, "worker");
t.equal(logEvents[1].pid, workerPid);
// serialising errors with stacks intact
t.type(logEvents[1].data[1], "Error");
t.match(logEvents[1].data[1].stack, "Error: oh dear");
// serialising circular references in objects
t.type(logEvents[1].data[2], "object");
t.type(logEvents[1].data[2].me, "object");
// serialising errors with custom properties
t.type(logEvents[1].data[3], "Error");
t.match(logEvents[1].data[3].stack, "Error: wtf");
t.equal(logEvents[1].data[3].alert, "chartreuse");
// serialising things that are not errors, but look a bit like them
t.type(logEvents[1].data[4], "object");
t.equal(logEvents[1].data[4].stack, "this is not a stack trace");
t.equal(logEvents[2].categoryName, "log4js");
t.equal(logEvents[2].level.toString(), "ERROR");
t.equal(logEvents[2].data[0], "Unable to parse log:");
t.end();
});
batch.end();
});
test("cluster worker", batch => {
batch.test("logger should get correct config", t => {
t.equal(workerLevel, "DEBUG");
t.end();
});
batch.end();
});
});
} else {
const workerLogger = log4js.getLogger("worker");
// test for serialising circular references
const circle = {};
circle.me = circle;
// test for serialising errors with their own properties
const someError = new Error("wtf");
someError.alert = "chartreuse";
// test for serialising things that look like errors but aren't.
const notAnError = { stack: "this is not a stack trace" };
workerLogger.info(
"this is worker",
new Error("oh dear"),
circle,
someError,
notAnError
);
// can't run the test in the worker, things get weird
process.send({
type: "::testing",
level: workerLogger.level.toString()
});
// test sending a badly-formed log message
process.send({ topic: "log4js:message", data: { cheese: "gouda" } });
cluster.worker.disconnect();
}
| const { test } = require("tap");
const cluster = require("cluster");
const log4js = require("../../lib/log4js");
const recorder = require("../../lib/appenders/recording");
log4js.configure({
appenders: {
vcr: { type: "recording" }
},
categories: { default: { appenders: ["vcr"], level: "debug" } }
});
if (cluster.isMaster) {
cluster.fork();
const masterLogger = log4js.getLogger("master");
const masterPid = process.pid;
masterLogger.info("this is master");
let workerLevel;
cluster.on("message", (worker, message) => {
if (worker.type || worker.topic) {
message = worker;
}
if (message.type && message.type === "::testing") {
workerLevel = message.level;
}
});
cluster.on("exit", worker => {
const workerPid = worker.process.pid;
const logEvents = recorder.replay();
test("cluster master", batch => {
batch.test("events should be logged", t => {
t.equal(logEvents.length, 3);
t.equal(logEvents[0].categoryName, "master");
t.equal(logEvents[0].pid, masterPid);
t.equal(logEvents[1].categoryName, "worker");
t.equal(logEvents[1].pid, workerPid);
// serialising errors with stacks intact
t.type(logEvents[1].data[1], "Error");
t.match(logEvents[1].data[1].stack, "Error: oh dear");
// serialising circular references in objects
t.type(logEvents[1].data[2], "object");
t.type(logEvents[1].data[2].me, "object");
// serialising errors with custom properties
t.type(logEvents[1].data[3], "Error");
t.match(logEvents[1].data[3].stack, "Error: wtf");
t.equal(logEvents[1].data[3].alert, "chartreuse");
// serialising things that are not errors, but look a bit like them
t.type(logEvents[1].data[4], "object");
t.equal(logEvents[1].data[4].stack, "this is not a stack trace");
t.equal(logEvents[2].categoryName, "log4js");
t.equal(logEvents[2].level.toString(), "ERROR");
t.equal(logEvents[2].data[0], "Unable to parse log:");
t.end();
});
batch.end();
});
test("cluster worker", batch => {
batch.test("logger should get correct config", t => {
t.equal(workerLevel, "DEBUG");
t.end();
});
batch.end();
});
});
} else {
const workerLogger = log4js.getLogger("worker");
// test for serialising circular references
const circle = {};
circle.me = circle;
// test for serialising errors with their own properties
const someError = new Error("wtf");
someError.alert = "chartreuse";
// test for serialising things that look like errors but aren't.
const notAnError = { stack: "this is not a stack trace" };
workerLogger.info(
"this is worker",
new Error("oh dear"),
circle,
someError,
notAnError
);
// can't run the test in the worker, things get weird
process.send({
type: "::testing",
level: workerLogger.level.toString()
});
// test sending a badly-formed log message
process.send({ topic: "log4js:message", data: { cheese: "gouda" } });
cluster.worker.disconnect();
}
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./docs/tcp.md | # TCP Appender
The TCP appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. It's designed to work with the [tcp-server](tcp-server.md), but it doesn't necessarily have to, just make sure whatever is listening at the other end is expecting JSON objects as strings.
## Configuration
* `type` - `tcp`
* `port` - `integer` (optional, defaults to `5000`) - the port to send to
* `host` - `string` (optional, defaults to `localhost`) - the host/IP address to send to
* `endMsg` - `string` (optional, defaults to `__LOG4JS__`) - the delimiter that marks the end of a log message
* `layout` - `object` (optional, defaults to a serialized log event) - see [layouts](layouts.md)
## Example
```javascript
log4js.configure({
appenders: {
network: { type: 'tcp', host: 'log.server' }
},
categories: {
default: { appenders: ['network'], level: 'error' }
}
});
```
This will send all error messages to `log.server:5000`.
| # TCP Appender
The TCP appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. It's designed to work with the [tcp-server](tcp-server.md), but it doesn't necessarily have to, just make sure whatever is listening at the other end is expecting JSON objects as strings.
## Configuration
* `type` - `tcp`
* `port` - `integer` (optional, defaults to `5000`) - the port to send to
* `host` - `string` (optional, defaults to `localhost`) - the host/IP address to send to
* `endMsg` - `string` (optional, defaults to `__LOG4JS__`) - the delimiter that marks the end of a log message
* `layout` - `object` (optional, defaults to a serialized log event) - see [layouts](layouts.md)
## Example
```javascript
log4js.configure({
appenders: {
network: { type: 'tcp', host: 'log.server' }
},
categories: {
default: { appenders: ['network'], level: 'error' }
}
});
```
This will send all error messages to `log.server:5000`.
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./lib/LoggingEvent.js | const flatted = require('flatted');
const levels = require('./levels');
/**
* @name LoggingEvent
* @namespace Log4js
*/
class LoggingEvent {
/**
* Models a logging event.
* @constructor
* @param {string} categoryName name of category
* @param {Log4js.Level} level level of message
* @param {Array} data objects to log
* @author Seth Chisamore
*/
constructor(categoryName, level, data, context, location) {
this.startTime = new Date();
this.categoryName = categoryName;
this.data = data;
this.level = level;
this.context = Object.assign({}, context); // eslint-disable-line prefer-object-spread
this.pid = process.pid;
if (location) {
this.functionName = location.functionName;
this.fileName = location.fileName;
this.lineNumber = location.lineNumber;
this.columnNumber = location.columnNumber;
this.callStack = location.callStack;
}
}
serialise() {
return flatted.stringify(this, (key, value) => {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
// duck-typing for Error object
if (value && value.message && value.stack) {
// eslint-disable-next-line prefer-object-spread
value = Object.assign({message: value.message, stack: value.stack}, value);
}
// JSON.stringify({a: parseInt('abc'), b: 1/0, c: -1/0}) returns {a: null, b: null, c: null}.
// The following allows us to serialize to NaN, Infinity and -Infinity correctly.
else if (typeof value === 'number' && (Number.isNaN(value) || !Number.isFinite(value))) {
value = value.toString();
}
// JSON.stringify([undefined]) returns [null].
// The following allows us to serialize to undefined correctly.
else if (typeof value === 'undefined') {
value = typeof value;
}
return value;
});
}
static deserialise(serialised) {
let event;
try {
const rehydratedEvent = flatted.parse(serialised, (key, value) => {
if (value && value.message && value.stack) {
const fakeError = new Error(value);
Object.keys(value).forEach((k) => { fakeError[k] = value[k]; });
value = fakeError;
}
return value;
});
rehydratedEvent.location = {
functionName: rehydratedEvent.functionName,
fileName: rehydratedEvent.fileName,
lineNumber: rehydratedEvent.lineNumber,
columnNumber: rehydratedEvent.columnNumber,
callStack: rehydratedEvent.callStack
};
event = new LoggingEvent(
rehydratedEvent.categoryName,
levels.getLevel(rehydratedEvent.level.levelStr),
rehydratedEvent.data,
rehydratedEvent.context,
rehydratedEvent.location
);
event.startTime = new Date(rehydratedEvent.startTime);
event.pid = rehydratedEvent.pid;
event.cluster = rehydratedEvent.cluster;
} catch (e) {
event = new LoggingEvent(
'log4js',
levels.ERROR,
['Unable to parse log:', serialised, 'because: ', e]
);
}
return event;
}
}
module.exports = LoggingEvent;
| const flatted = require('flatted');
const levels = require('./levels');
/**
* @name LoggingEvent
* @namespace Log4js
*/
class LoggingEvent {
/**
* Models a logging event.
* @constructor
* @param {string} categoryName name of category
* @param {Log4js.Level} level level of message
* @param {Array} data objects to log
* @author Seth Chisamore
*/
constructor(categoryName, level, data, context, location) {
this.startTime = new Date();
this.categoryName = categoryName;
this.data = data;
this.level = level;
this.context = Object.assign({}, context); // eslint-disable-line prefer-object-spread
this.pid = process.pid;
if (location) {
this.functionName = location.functionName;
this.fileName = location.fileName;
this.lineNumber = location.lineNumber;
this.columnNumber = location.columnNumber;
this.callStack = location.callStack;
}
}
serialise() {
return flatted.stringify(this, (key, value) => {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
// duck-typing for Error object
if (value && value.message && value.stack) {
// eslint-disable-next-line prefer-object-spread
value = Object.assign({message: value.message, stack: value.stack}, value);
}
// JSON.stringify({a: parseInt('abc'), b: 1/0, c: -1/0}) returns {a: null, b: null, c: null}.
// The following allows us to serialize to NaN, Infinity and -Infinity correctly.
else if (typeof value === 'number' && (Number.isNaN(value) || !Number.isFinite(value))) {
value = value.toString();
}
// JSON.stringify([undefined]) returns [null].
// The following allows us to serialize to undefined correctly.
else if (typeof value === 'undefined') {
value = typeof value;
}
return value;
});
}
static deserialise(serialised) {
let event;
try {
const rehydratedEvent = flatted.parse(serialised, (key, value) => {
if (value && value.message && value.stack) {
const fakeError = new Error(value);
Object.keys(value).forEach((k) => { fakeError[k] = value[k]; });
value = fakeError;
}
return value;
});
rehydratedEvent.location = {
functionName: rehydratedEvent.functionName,
fileName: rehydratedEvent.fileName,
lineNumber: rehydratedEvent.lineNumber,
columnNumber: rehydratedEvent.columnNumber,
callStack: rehydratedEvent.callStack
};
event = new LoggingEvent(
rehydratedEvent.categoryName,
levels.getLevel(rehydratedEvent.level.levelStr),
rehydratedEvent.data,
rehydratedEvent.context,
rehydratedEvent.location
);
event.startTime = new Date(rehydratedEvent.startTime);
event.pid = rehydratedEvent.pid;
event.cluster = rehydratedEvent.cluster;
} catch (e) {
event = new LoggingEvent(
'log4js',
levels.ERROR,
['Unable to parse log:', serialised, 'because: ', e]
);
}
return event;
}
}
module.exports = LoggingEvent;
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/file-descriptor-leak-test.js | const { test } = require("tap");
const fs = require("fs");
const path = require("path");
const log4js = require("../../lib/log4js");
const removeFiles = async filenames => {
if (!Array.isArray(filenames))
filenames = [filenames];
const promises = filenames.map(filename => fs.promises.unlink(filename));
await Promise.allSettled(promises);
};
// no file descriptors on Windows, so don't run the tests
if (process.platform !== "win32") {
test("multiple log4js configure fd leak test", batch => {
const config = {
appenders: {},
categories: {
default: { appenders: [], level: 'debug' }
}
};
// create 11 appenders
const numOfAppenders = 11;
for (let i = 1; i <= numOfAppenders; i++) {
config.appenders[`app${i}`] = { type: 'file', filename: path.join(__dirname, `file${i}.log`) };
config.categories.default.appenders.push(`app${i}`);
}
const initialFd = fs.readdirSync('/proc/self/fd').length;
let loadedFd;
batch.test("initial log4js configure to increase file descriptor count", t => {
log4js.configure(config);
// wait for the file system to catch up
setTimeout(() => {
loadedFd = fs.readdirSync('/proc/self/fd').length;
t.equal(loadedFd, initialFd + numOfAppenders,
`file descriptor count should increase by ${numOfAppenders} after 1st configure() call`);
t.end();
}, 250);
});
batch.test("repeated log4js configure to not increase file descriptor count", t => {
log4js.configure(config);
log4js.configure(config);
log4js.configure(config);
// wait for the file system to catch up
setTimeout(() => {
t.equal(fs.readdirSync('/proc/self/fd').length, loadedFd,
`file descriptor count should be identical after repeated configure() calls`);
t.end();
}, 250);
});
batch.test("file descriptor count should return back to initial count", t => {
log4js.shutdown();
// wait for the file system to catch up
setTimeout(() => {
t.equal(fs.readdirSync('/proc/self/fd').length, initialFd,
`file descriptor count should be back to initial`);
t.end();
}, 250);
});
batch.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
const filenames = Object.values(config.appenders).map(appender => appender.filename);
await removeFiles(filenames);
});
batch.end();
});
} | const { test } = require("tap");
const fs = require("fs");
const path = require("path");
const log4js = require("../../lib/log4js");
const removeFiles = async filenames => {
if (!Array.isArray(filenames))
filenames = [filenames];
const promises = filenames.map(filename => fs.promises.unlink(filename));
await Promise.allSettled(promises);
};
// no file descriptors on Windows, so don't run the tests
if (process.platform !== "win32") {
test("multiple log4js configure fd leak test", batch => {
const config = {
appenders: {},
categories: {
default: { appenders: [], level: 'debug' }
}
};
// create 11 appenders
const numOfAppenders = 11;
for (let i = 1; i <= numOfAppenders; i++) {
config.appenders[`app${i}`] = { type: 'file', filename: path.join(__dirname, `file${i}.log`) };
config.categories.default.appenders.push(`app${i}`);
}
const initialFd = fs.readdirSync('/proc/self/fd').length;
let loadedFd;
batch.test("initial log4js configure to increase file descriptor count", t => {
log4js.configure(config);
// wait for the file system to catch up
setTimeout(() => {
loadedFd = fs.readdirSync('/proc/self/fd').length;
t.equal(loadedFd, initialFd + numOfAppenders,
`file descriptor count should increase by ${numOfAppenders} after 1st configure() call`);
t.end();
}, 250);
});
batch.test("repeated log4js configure to not increase file descriptor count", t => {
log4js.configure(config);
log4js.configure(config);
log4js.configure(config);
// wait for the file system to catch up
setTimeout(() => {
t.equal(fs.readdirSync('/proc/self/fd').length, loadedFd,
`file descriptor count should be identical after repeated configure() calls`);
t.end();
}, 250);
});
batch.test("file descriptor count should return back to initial count", t => {
log4js.shutdown();
// wait for the file system to catch up
setTimeout(() => {
t.equal(fs.readdirSync('/proc/self/fd').length, initialFd,
`file descriptor count should be back to initial`);
t.end();
}, 250);
});
batch.teardown(async () => {
await new Promise(resolve => { log4js.shutdown(resolve); });
const filenames = Object.values(config.appenders).map(appender => appender.filename);
await removeFiles(filenames);
});
batch.end();
});
} | -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/appender-dependencies-test.js | const { test } = require("tap");
const categories = {
default: { appenders: ["filtered"], level: "debug" }
}
let log4js;
let recording;
test("log4js appender dependencies", batch => {
batch.beforeEach(() => {
log4js = require("../../lib/log4js");
recording = require("../../lib/appenders/recording");
});
batch.afterEach(() => {
recording.erase();
});
batch.test("in order", t => {
const config = {
categories,
appenders: {
recorder: { type: "recording" },
filtered: {
type: "logLevelFilter",
appender: "recorder",
level: "ERROR"
}
}
};
t.test('should resolve if defined in dependency order', assert => {
assert.doesNotThrow(() => {
log4js.configure(config);
}, 'this should not trigger an error');
assert.end();
});
const logger = log4js.getLogger("logLevelTest");
logger.debug("this should not trigger an event");
logger.error("this should, though");
const logEvents = recording.replay();
t.test(
"should process log events normally",
assert => {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], "this should, though");
assert.end();
}
);
t.end();
});
batch.test("not in order", t => {
const config = {
categories,
appenders: {
filtered: {
type: "logLevelFilter",
appender: "recorder",
level: "ERROR"
},
recorder: { type: "recording" },
}
};
t.test('should resolve if defined out of dependency order', assert => {
assert.doesNotThrow(() => {
log4js.configure(config);
}, 'this should not trigger an error');
assert.end();
});
const logger = log4js.getLogger("logLevelTest");
logger.debug("this should not trigger an event");
logger.error("this should, though");
const logEvents = recording.replay();
t.test(
"should process log events normally",
assert => {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], "this should, though");
assert.end();
}
);
t.end();
});
batch.test("with dependency loop", t => {
const config = {
categories,
appenders: {
filtered: {
type: "logLevelFilter",
appender: "filtered2",
level: "ERROR"
},
filtered2: {
type: "logLevelFilter",
appender: "filtered",
level: "ERROR"
},
recorder: { type: "recording" },
}
};
t.test('should throw an error if if a dependency loop is found', assert => {
assert.throws(() => {
log4js.configure(config);
}, 'Dependency loop detected for appender filtered.');
assert.end();
});
t.end();
});
batch.end();
});
| const { test } = require("tap");
const categories = {
default: { appenders: ["filtered"], level: "debug" }
}
let log4js;
let recording;
test("log4js appender dependencies", batch => {
batch.beforeEach(() => {
log4js = require("../../lib/log4js");
recording = require("../../lib/appenders/recording");
});
batch.afterEach(() => {
recording.erase();
});
batch.test("in order", t => {
const config = {
categories,
appenders: {
recorder: { type: "recording" },
filtered: {
type: "logLevelFilter",
appender: "recorder",
level: "ERROR"
}
}
};
t.test('should resolve if defined in dependency order', assert => {
assert.doesNotThrow(() => {
log4js.configure(config);
}, 'this should not trigger an error');
assert.end();
});
const logger = log4js.getLogger("logLevelTest");
logger.debug("this should not trigger an event");
logger.error("this should, though");
const logEvents = recording.replay();
t.test(
"should process log events normally",
assert => {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], "this should, though");
assert.end();
}
);
t.end();
});
batch.test("not in order", t => {
const config = {
categories,
appenders: {
filtered: {
type: "logLevelFilter",
appender: "recorder",
level: "ERROR"
},
recorder: { type: "recording" },
}
};
t.test('should resolve if defined out of dependency order', assert => {
assert.doesNotThrow(() => {
log4js.configure(config);
}, 'this should not trigger an error');
assert.end();
});
const logger = log4js.getLogger("logLevelTest");
logger.debug("this should not trigger an event");
logger.error("this should, though");
const logEvents = recording.replay();
t.test(
"should process log events normally",
assert => {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], "this should, though");
assert.end();
}
);
t.end();
});
batch.test("with dependency loop", t => {
const config = {
categories,
appenders: {
filtered: {
type: "logLevelFilter",
appender: "filtered2",
level: "ERROR"
},
filtered2: {
type: "logLevelFilter",
appender: "filtered",
level: "ERROR"
},
recorder: { type: "recording" },
}
};
t.test('should throw an error if if a dependency loop is found', assert => {
assert.throws(() => {
log4js.configure(config);
}, 'Dependency loop detected for appender filtered.');
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./types/log4js.d.ts | // Type definitions for log4js
type Format = string | ((req: any, res: any, formatter: ((str: string) => string)) => string);
export interface Log4js {
getLogger(category?: string): Logger;
configure(filename: string): Log4js;
configure(config: Configuration): Log4js;
addLayout(name: string, config: (a: any) => (logEvent: LoggingEvent) => string): void;
connectLogger(logger: Logger, options: { format?: Format; level?: string; nolog?: any; }): any; // express.Handler;
levels: Levels;
shutdown(cb: (error: Error) => void): void | null;
}
export function getLogger(category?: string): Logger;
export function configure(filename: string): Log4js;
export function configure(config: Configuration): Log4js;
export function addLayout(name: string, config: (a: any) => (logEvent: LoggingEvent) => any): void;
export function connectLogger(logger: Logger, options: { format?: Format; level?: string; nolog?: any; statusRules?: any[], context?: boolean }): any; // express.Handler;
export function recording(): Recording;
export const levels: Levels;
export function shutdown(cb?: (error: Error) => void): void | null;
export interface BasicLayout {
type: 'basic';
}
export interface ColoredLayout {
type: 'colored' | 'coloured';
}
export interface MessagePassThroughLayout {
type: 'messagePassThrough';
}
export interface DummyLayout {
type: 'dummy';
}
export interface Level {
isEqualTo(other: string): boolean;
isEqualTo(otherLevel: Level): boolean;
isLessThanOrEqualTo(other: string): boolean;
isLessThanOrEqualTo(otherLevel: Level): boolean;
isGreaterThanOrEqualTo(other: string): boolean;
isGreaterThanOrEqualTo(otherLevel: Level): boolean;
colour: string;
level: number;
levelStr: string;
}
export interface LoggingEvent {
categoryName: string; // name of category
level: Level; // level of message
data: any[]; // objects to log
startTime: Date;
pid: number;
context: any;
cluster?: {
workerId: number;
worker: number;
};
functionName?: string;
fileName?: string;
lineNumber?: number;
columnNumber?: number;
callStack?: string;
}
export type Token = ((logEvent: LoggingEvent) => string) | string;
export interface PatternLayout {
type: 'pattern';
// specifier for the output format, using placeholders as described below
pattern: string;
// user-defined tokens to be used in the pattern
tokens?: { [name: string]: Token };
}
export interface CustomLayout {
[key: string]: any;
type: string;
}
export type Layout = BasicLayout | ColoredLayout | MessagePassThroughLayout | DummyLayout | PatternLayout | CustomLayout;
/**
* Category Filter
*
* @see https://log4js-node.github.io/log4js-node/categoryFilter.html
*/
export interface CategoryFilterAppender {
type: "categoryFilter";
// the category (or categories if you provide an array of values) that will be excluded from the appender.
exclude?: string | string[];
// the name of the appender to filter. see https://log4js-node.github.io/log4js-node/layouts.html
appender?: string;
}
/**
* No Log Filter
*
* @see https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
export interface NoLogFilterAppender {
type: "noLogFilter";
// the regular expression (or the regular expressions if you provide an array of values)
// will be used for evaluating the events to pass to the appender.
// The events, which will match the regular expression, will be excluded and so not logged.
exclude: string | string[];
// the name of an appender, defined in the same configuration, that you want to filter.
appender: string;
}
/**
* Console Appender
*
* @see https://log4js-node.github.io/log4js-node/console.html
*/
export interface ConsoleAppender {
type: 'console';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface FileAppender {
type: 'file';
// the path of the file where you want your logs written.
filename: string;
// (defaults to MAX_SAFE_INTEGER) the maximum size (in bytes) for the log file.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
keepFileExt?: boolean;
// (defaults to .) the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
fileNameSep?: string;
}
export interface SyncfileAppender {
type: 'fileSync';
// the path of the file where you want your logs written.
filename: string;
// (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
}
export interface DateFileAppender {
type: 'dateFile';
// the path of the file where you want your logs written.
filename: string;
// (defaults to yyyy-MM-dd) the pattern to use to determine when to roll the logs.
/**
* The following strings are recognised in the pattern:
* - yyyy : the full year, use yy for just the last two digits
* - MM : the month
* - dd : the day of the month
* - hh : the hour of the day (24-hour clock)
* - mm : the minute of the hour
* - ss : seconds
* - SSS : milliseconds (although I'm not sure you'd want to roll your logs every millisecond)
* - O : timezone (capital letter o)
*/
pattern?: string;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
keepFileExt?: boolean;
// (defaults to .) the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
fileNameSep?: string;
// (defaults to false) include the pattern in the name of the current log file.
alwaysIncludePattern?: boolean;
// (defaults to 1) the number of old files that matches the pattern to keep (excluding the hot file).
numBackups?: number;
}
export interface LogLevelFilterAppender {
type: 'logLevelFilter';
// the name of an appender, defined in the same configuration, that you want to filter
appender: string;
// the minimum level of event to allow through the filter
level: string;
// (defaults to FATAL) the maximum level of event to allow through the filter
maxLevel?: string;
}
export interface MultiFileAppender {
type: 'multiFile';
// the base part of the generated log filename
base: string;
// the value to use to split files (see below).
property: string;
// the suffix for the generated log filename.
extension: string;
}
export interface MultiprocessAppender {
type: 'multiprocess';
// controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
mode: 'master' | 'worker';
// (only needed if mode == master) the name of the appender to send the log events to
appender?: string;
// (defaults to 5000) the port to listen on, or send to
loggerPort?: number;
// (defaults to localhost) the host/IP address to listen on, or send to
loggerHost?: string;
}
export interface RecordingAppender {
type: 'recording';
}
export interface StandardErrorAppender {
type: 'stderr';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface StandardOutputAppender {
type: 'stdout';
// (defaults to ColoredLayout)
layout?: Layout;
}
/**
* TCP Appender
*
* @see https://log4js-node.github.io/log4js-node/tcp.html
*/
export interface TCPAppender {
type: 'tcp';
// (defaults to 5000)
port?: number
// (defaults to localhost)
host?: string
// (defaults to __LOG4JS__)
endMsg?: string
// (defaults to a serialized log event)
layout?: Layout;
}
export interface CustomAppender {
type: string | AppenderModule;
[key: string]: any;
}
export interface AppenderModule {
configure: (config: Config, layouts: LayoutsParam) => AppenderFunction;
}
export type AppenderFunction = (loggingEvent: LoggingEvent) => void;
// TODO: Actually add types here...
// It's supposed to be the full config element
export type Config = any
export interface LayoutsParam {
basicLayout: LayoutFunction;
messagePassThroughLayout: LayoutFunction;
patternLayout: LayoutFunction;
colouredLayout: LayoutFunction;
coloredLayout: LayoutFunction;
dummyLayout: LayoutFunction;
addLayout: (name: string, serializerGenerator: LayoutFunction) => void;
layout: (name: string, config: PatternToken) => LayoutFunction;
}
export interface PatternToken {
pattern: string; // TODO type this to enforce good pattern...
tokens: { [tokenName: string]: () => any };
}
export type LayoutFunction = (loggingEvent: LoggingEvent) => string;
export type Appender = CategoryFilterAppender
| ConsoleAppender
| FileAppender
| SyncfileAppender
| DateFileAppender
| LogLevelFilterAppender
| NoLogFilterAppender
| MultiFileAppender
| MultiprocessAppender
| RecordingAppender
| StandardErrorAppender
| StandardOutputAppender
| TCPAppender
| CustomAppender;
export interface Levels {
ALL: Level;
MARK: Level;
TRACE: Level;
DEBUG: Level;
INFO: Level;
WARN: Level;
ERROR: Level;
FATAL: Level;
OFF: Level;
levels: Level[];
getLevel(level: Level | string, defaultLevel?: Level): Level;
addLevels(customLevels: object): void;
}
export interface Configuration {
appenders: { [name: string]: Appender; };
categories: { [name: string]: { appenders: string[]; level: string; enableCallStack?: boolean; } };
pm2?: boolean;
pm2InstanceVar?: string;
levels?: Levels;
disableClustering?: boolean;
}
export interface Recording {
configure(loggingEvent: LoggingEvent): void
replay(): LoggingEvent[]
playback(): LoggingEvent[]
reset(): void
erase(): void
}
export class Logger {
constructor(name: string);
readonly category: string;
level: Level | string;
log(level: Level | string, ...args: any[]): void;
isLevelEnabled(level?: string): boolean;
isTraceEnabled(): boolean;
isDebugEnabled(): boolean;
isInfoEnabled(): boolean;
isWarnEnabled(): boolean;
isErrorEnabled(): boolean;
isFatalEnabled(): boolean;
_log(level: Level, data: any): void;
addContext(key: string, value: any): void;
removeContext(key: string): void;
clearContext(): void;
setParseCallStackFunction(parseFunction: Function): void;
trace(message: any, ...args: any[]): void;
debug(message: any, ...args: any[]): void;
info(message: any, ...args: any[]): void;
warn(message: any, ...args: any[]): void;
error(message: any, ...args: any[]): void;
fatal(message: any, ...args: any[]): void;
mark(message: any, ...args: any[]): void;
}
| // Type definitions for log4js
type Format = string | ((req: any, res: any, formatter: ((str: string) => string)) => string);
export interface Log4js {
getLogger(category?: string): Logger;
configure(filename: string): Log4js;
configure(config: Configuration): Log4js;
addLayout(name: string, config: (a: any) => (logEvent: LoggingEvent) => string): void;
connectLogger(logger: Logger, options: { format?: Format; level?: string; nolog?: any; }): any; // express.Handler;
levels: Levels;
shutdown(cb: (error: Error) => void): void | null;
}
export function getLogger(category?: string): Logger;
export function configure(filename: string): Log4js;
export function configure(config: Configuration): Log4js;
export function addLayout(name: string, config: (a: any) => (logEvent: LoggingEvent) => any): void;
export function connectLogger(logger: Logger, options: { format?: Format; level?: string; nolog?: any; statusRules?: any[], context?: boolean }): any; // express.Handler;
export function recording(): Recording;
export const levels: Levels;
export function shutdown(cb?: (error: Error) => void): void | null;
export interface BasicLayout {
type: 'basic';
}
export interface ColoredLayout {
type: 'colored' | 'coloured';
}
export interface MessagePassThroughLayout {
type: 'messagePassThrough';
}
export interface DummyLayout {
type: 'dummy';
}
export interface Level {
isEqualTo(other: string): boolean;
isEqualTo(otherLevel: Level): boolean;
isLessThanOrEqualTo(other: string): boolean;
isLessThanOrEqualTo(otherLevel: Level): boolean;
isGreaterThanOrEqualTo(other: string): boolean;
isGreaterThanOrEqualTo(otherLevel: Level): boolean;
colour: string;
level: number;
levelStr: string;
}
export interface LoggingEvent {
categoryName: string; // name of category
level: Level; // level of message
data: any[]; // objects to log
startTime: Date;
pid: number;
context: any;
cluster?: {
workerId: number;
worker: number;
};
functionName?: string;
fileName?: string;
lineNumber?: number;
columnNumber?: number;
callStack?: string;
}
export type Token = ((logEvent: LoggingEvent) => string) | string;
export interface PatternLayout {
type: 'pattern';
// specifier for the output format, using placeholders as described below
pattern: string;
// user-defined tokens to be used in the pattern
tokens?: { [name: string]: Token };
}
export interface CustomLayout {
[key: string]: any;
type: string;
}
export type Layout = BasicLayout | ColoredLayout | MessagePassThroughLayout | DummyLayout | PatternLayout | CustomLayout;
/**
* Category Filter
*
* @see https://log4js-node.github.io/log4js-node/categoryFilter.html
*/
export interface CategoryFilterAppender {
type: "categoryFilter";
// the category (or categories if you provide an array of values) that will be excluded from the appender.
exclude?: string | string[];
// the name of the appender to filter. see https://log4js-node.github.io/log4js-node/layouts.html
appender?: string;
}
/**
* No Log Filter
*
* @see https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
export interface NoLogFilterAppender {
type: "noLogFilter";
// the regular expression (or the regular expressions if you provide an array of values)
// will be used for evaluating the events to pass to the appender.
// The events, which will match the regular expression, will be excluded and so not logged.
exclude: string | string[];
// the name of an appender, defined in the same configuration, that you want to filter.
appender: string;
}
/**
* Console Appender
*
* @see https://log4js-node.github.io/log4js-node/console.html
*/
export interface ConsoleAppender {
type: 'console';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface FileAppender {
type: 'file';
// the path of the file where you want your logs written.
filename: string;
// (defaults to MAX_SAFE_INTEGER) the maximum size (in bytes) for the log file.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
keepFileExt?: boolean;
// (defaults to .) the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
fileNameSep?: string;
}
export interface SyncfileAppender {
type: 'fileSync';
// the path of the file where you want your logs written.
filename: string;
// (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
}
export interface DateFileAppender {
type: 'dateFile';
// the path of the file where you want your logs written.
filename: string;
// (defaults to yyyy-MM-dd) the pattern to use to determine when to roll the logs.
/**
* The following strings are recognised in the pattern:
* - yyyy : the full year, use yy for just the last two digits
* - MM : the month
* - dd : the day of the month
* - hh : the hour of the day (24-hour clock)
* - mm : the minute of the hour
* - ss : seconds
* - SSS : milliseconds (although I'm not sure you'd want to roll your logs every millisecond)
* - O : timezone (capital letter o)
*/
pattern?: string;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
keepFileExt?: boolean;
// (defaults to .) the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
fileNameSep?: string;
// (defaults to false) include the pattern in the name of the current log file.
alwaysIncludePattern?: boolean;
// (defaults to 1) the number of old files that matches the pattern to keep (excluding the hot file).
numBackups?: number;
}
export interface LogLevelFilterAppender {
type: 'logLevelFilter';
// the name of an appender, defined in the same configuration, that you want to filter
appender: string;
// the minimum level of event to allow through the filter
level: string;
// (defaults to FATAL) the maximum level of event to allow through the filter
maxLevel?: string;
}
export interface MultiFileAppender {
type: 'multiFile';
// the base part of the generated log filename
base: string;
// the value to use to split files (see below).
property: string;
// the suffix for the generated log filename.
extension: string;
}
export interface MultiprocessAppender {
type: 'multiprocess';
// controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
mode: 'master' | 'worker';
// (only needed if mode == master) the name of the appender to send the log events to
appender?: string;
// (defaults to 5000) the port to listen on, or send to
loggerPort?: number;
// (defaults to localhost) the host/IP address to listen on, or send to
loggerHost?: string;
}
export interface RecordingAppender {
type: 'recording';
}
export interface StandardErrorAppender {
type: 'stderr';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface StandardOutputAppender {
type: 'stdout';
// (defaults to ColoredLayout)
layout?: Layout;
}
/**
* TCP Appender
*
* @see https://log4js-node.github.io/log4js-node/tcp.html
*/
export interface TCPAppender {
type: 'tcp';
// (defaults to 5000)
port?: number
// (defaults to localhost)
host?: string
// (defaults to __LOG4JS__)
endMsg?: string
// (defaults to a serialized log event)
layout?: Layout;
}
export interface CustomAppender {
type: string | AppenderModule;
[key: string]: any;
}
export interface AppenderModule {
configure: (config: Config, layouts: LayoutsParam) => AppenderFunction;
}
export type AppenderFunction = (loggingEvent: LoggingEvent) => void;
// TODO: Actually add types here...
// It's supposed to be the full config element
export type Config = any
export interface LayoutsParam {
basicLayout: LayoutFunction;
messagePassThroughLayout: LayoutFunction;
patternLayout: LayoutFunction;
colouredLayout: LayoutFunction;
coloredLayout: LayoutFunction;
dummyLayout: LayoutFunction;
addLayout: (name: string, serializerGenerator: LayoutFunction) => void;
layout: (name: string, config: PatternToken) => LayoutFunction;
}
export interface PatternToken {
pattern: string; // TODO type this to enforce good pattern...
tokens: { [tokenName: string]: () => any };
}
export type LayoutFunction = (loggingEvent: LoggingEvent) => string;
export type Appender = CategoryFilterAppender
| ConsoleAppender
| FileAppender
| SyncfileAppender
| DateFileAppender
| LogLevelFilterAppender
| NoLogFilterAppender
| MultiFileAppender
| MultiprocessAppender
| RecordingAppender
| StandardErrorAppender
| StandardOutputAppender
| TCPAppender
| CustomAppender;
export interface Levels {
ALL: Level;
MARK: Level;
TRACE: Level;
DEBUG: Level;
INFO: Level;
WARN: Level;
ERROR: Level;
FATAL: Level;
OFF: Level;
levels: Level[];
getLevel(level: Level | string, defaultLevel?: Level): Level;
addLevels(customLevels: object): void;
}
export interface Configuration {
appenders: { [name: string]: Appender; };
categories: { [name: string]: { appenders: string[]; level: string; enableCallStack?: boolean; } };
pm2?: boolean;
pm2InstanceVar?: string;
levels?: Levels;
disableClustering?: boolean;
}
export interface Recording {
configure(loggingEvent: LoggingEvent): void
replay(): LoggingEvent[]
playback(): LoggingEvent[]
reset(): void
erase(): void
}
export class Logger {
constructor(name: string);
readonly category: string;
level: Level | string;
log(level: Level | string, ...args: any[]): void;
isLevelEnabled(level?: string): boolean;
isTraceEnabled(): boolean;
isDebugEnabled(): boolean;
isInfoEnabled(): boolean;
isWarnEnabled(): boolean;
isErrorEnabled(): boolean;
isFatalEnabled(): boolean;
_log(level: Level, data: any): void;
addContext(key: string, value: any): void;
removeContext(key: string): void;
clearContext(): void;
setParseCallStackFunction(parseFunction: Function): void;
trace(message: any, ...args: any[]): void;
debug(message: any, ...args: any[]): void;
info(message: any, ...args: any[]): void;
warn(message: any, ...args: any[]): void;
error(message: any, ...args: any[]): void;
fatal(message: any, ...args: any[]): void;
mark(message: any, ...args: any[]): void;
}
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/logFaces-appender.js | const log4js = require('../lib/log4js');
/*
logFaces server configured with UDP receiver, using JSON format,
listening on port 55201 will receive the logs from the appender below.
*/
log4js.configure({
appenders: {
logFaces: {
type: '@log4js-node/logfaces-udp', // (mandatory) appender type
application: 'MY-NODEJS', // (optional) name of the application (domain)
remoteHost: 'localhost', // (optional) logFaces server host or IP address
port: 55201, // (optional) logFaces UDP receiver port (must use JSON format)
layout: { // (optional) the layout to use for messages
type: 'pattern',
pattern: '%m'
}
}
},
categories: { default: { appenders: ['logFaces'], level: 'info' } }
});
const logger = log4js.getLogger('myLogger');
logger.info('Testing message %s', 'arg1');
| const log4js = require('../lib/log4js');
/*
logFaces server configured with UDP receiver, using JSON format,
listening on port 55201 will receive the logs from the appender below.
*/
log4js.configure({
appenders: {
logFaces: {
type: '@log4js-node/logfaces-udp', // (mandatory) appender type
application: 'MY-NODEJS', // (optional) name of the application (domain)
remoteHost: 'localhost', // (optional) logFaces server host or IP address
port: 55201, // (optional) logFaces UDP receiver port (must use JSON format)
layout: { // (optional) the layout to use for messages
type: 'pattern',
pattern: '%m'
}
}
},
categories: { default: { appenders: ['logFaces'], level: 'info' } }
});
const logger = log4js.getLogger('myLogger');
logger.info('Testing message %s', 'arg1');
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./.git/logs/refs/remotes/origin/HEAD | 0000000000000000000000000000000000000000 bd457888eb91b9e932fe8f66d720cf2d9d6442f4 jupyter <[email protected]> 1704849661 +0000 clone: from https://github.com/log4js-node/log4js-node.git
| 0000000000000000000000000000000000000000 bd457888eb91b9e932fe8f66d720cf2d9d6442f4 jupyter <[email protected]> 1704849661 +0000 clone: from https://github.com/log4js-node/log4js-node.git
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./examples/patternLayout-tokens.js | const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console',
layout: {
type: 'pattern',
pattern: '%[%r (%x{pid}) %p %c -%] %m%n',
tokens: {
pid: function () { return process.pid; }
}
}
}
},
categories: {
default: { appenders: ['out'], level: 'info' }
}
});
const logger = log4js.getLogger('app');
logger.info('Test log message');
| const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console',
layout: {
type: 'pattern',
pattern: '%[%r (%x{pid}) %p %c -%] %m%n',
tokens: {
pid: function () { return process.pid; }
}
}
}
},
categories: {
default: { appenders: ['out'], level: 'info' }
}
});
const logger = log4js.getLogger('app');
logger.info('Test log message');
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./test/tap/server-test.js | const { test } = require("tap");
const net = require("net");
const log4js = require("../../lib/log4js");
const vcr = require("../../lib/appenders/recording");
const levels = require("../../lib/levels");
const LoggingEvent = require("../../lib/LoggingEvent");
test("TCP Server", batch => {
batch.test(
"should listen for TCP messages and re-send via process.send",
t => {
log4js.configure({
appenders: {
vcr: { type: "recording" },
tcp: { type: "tcp-server", port: 5678 }
},
categories: {
default: { appenders: ["vcr"], level: "debug" }
}
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5678, () => {
socket.write(
`${new LoggingEvent(
"test-category",
levels.INFO,
["something"],
{}
).serialise()}__LOG4JS__${new LoggingEvent(
"test-category",
levels.INFO,
["something else"],
{}
).serialise()}__LOG4JS__some nonsense__LOG4JS__{"some":"json"}__LOG4JS__`,
() => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 4);
t.match(logs[0], {
data: ["something"],
categoryName: "test-category",
level: { levelStr: "INFO" },
context: {}
});
t.match(logs[1], {
data: ["something else"],
categoryName: "test-category",
level: { levelStr: "INFO" },
context: {}
});
t.match(logs[2], {
data: [
"Unable to parse log:",
"some nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[3], {
data: [
"Unable to parse log:",
'{"some":"json"}',
"because: ",
TypeError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.end();
});
}, 100);
}
);
});
socket.unref();
}, 100);
}
);
batch.test(
"sending incomplete messages in chunks",
t => {
log4js.configure({
appenders: {
vcr: { type: "recording" },
tcp: { type: "tcp-server" }
},
categories: {
default: { appenders: ["vcr"], level: "debug" }
}
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5000, () => {
const syncWrite = (dataArray, finalCallback) => {
if (!Array.isArray(dataArray)) {
dataArray = [dataArray];
}
if (typeof finalCallback !== "function") {
finalCallback = () => {};
}
setTimeout(() => {
if (!dataArray.length) {
finalCallback();
} else if (dataArray.length === 1) {
socket.write(dataArray.shift(), finalCallback);
} else {
socket.write(dataArray.shift(), () => { syncWrite(dataArray, finalCallback); });
}
}, 100);
};
const dataArray = [
"__LOG4JS__",
"Hello__LOG4JS__World",
"__LOG4JS__",
"testing nonsense",
`__LOG4JS__more nonsense__LOG4JS__`
];
const finalCallback = () => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 8);
t.match(logs[4], {
data: [
"Unable to parse log:",
"Hello",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[5], {
data: [
"Unable to parse log:",
"World",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[6], {
data: [
"Unable to parse log:",
"testing nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[7], {
data: [
"Unable to parse log:",
"more nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.end();
});
}, 100);
};
syncWrite(dataArray, finalCallback);
});
socket.unref();
}, 100);
}
);
batch.end();
});
| const { test } = require("tap");
const net = require("net");
const log4js = require("../../lib/log4js");
const vcr = require("../../lib/appenders/recording");
const levels = require("../../lib/levels");
const LoggingEvent = require("../../lib/LoggingEvent");
test("TCP Server", batch => {
batch.test(
"should listen for TCP messages and re-send via process.send",
t => {
log4js.configure({
appenders: {
vcr: { type: "recording" },
tcp: { type: "tcp-server", port: 5678 }
},
categories: {
default: { appenders: ["vcr"], level: "debug" }
}
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5678, () => {
socket.write(
`${new LoggingEvent(
"test-category",
levels.INFO,
["something"],
{}
).serialise()}__LOG4JS__${new LoggingEvent(
"test-category",
levels.INFO,
["something else"],
{}
).serialise()}__LOG4JS__some nonsense__LOG4JS__{"some":"json"}__LOG4JS__`,
() => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 4);
t.match(logs[0], {
data: ["something"],
categoryName: "test-category",
level: { levelStr: "INFO" },
context: {}
});
t.match(logs[1], {
data: ["something else"],
categoryName: "test-category",
level: { levelStr: "INFO" },
context: {}
});
t.match(logs[2], {
data: [
"Unable to parse log:",
"some nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[3], {
data: [
"Unable to parse log:",
'{"some":"json"}',
"because: ",
TypeError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.end();
});
}, 100);
}
);
});
socket.unref();
}, 100);
}
);
batch.test(
"sending incomplete messages in chunks",
t => {
log4js.configure({
appenders: {
vcr: { type: "recording" },
tcp: { type: "tcp-server" }
},
categories: {
default: { appenders: ["vcr"], level: "debug" }
}
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5000, () => {
const syncWrite = (dataArray, finalCallback) => {
if (!Array.isArray(dataArray)) {
dataArray = [dataArray];
}
if (typeof finalCallback !== "function") {
finalCallback = () => {};
}
setTimeout(() => {
if (!dataArray.length) {
finalCallback();
} else if (dataArray.length === 1) {
socket.write(dataArray.shift(), finalCallback);
} else {
socket.write(dataArray.shift(), () => { syncWrite(dataArray, finalCallback); });
}
}, 100);
};
const dataArray = [
"__LOG4JS__",
"Hello__LOG4JS__World",
"__LOG4JS__",
"testing nonsense",
`__LOG4JS__more nonsense__LOG4JS__`
];
const finalCallback = () => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 8);
t.match(logs[4], {
data: [
"Unable to parse log:",
"Hello",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[5], {
data: [
"Unable to parse log:",
"World",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[6], {
data: [
"Unable to parse log:",
"testing nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[7], {
data: [
"Unable to parse log:",
"more nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.end();
});
}, 100);
};
syncWrite(dataArray, finalCallback);
});
socket.unref();
}, 100);
}
);
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./docs/webpack.md | # Working with webpack
Log4js uses dynamic require for loading appenders. Webpack doesn't know at build time which appender will be used at runtime so a small workaround is necessary.
```
const stdout = require('log4js/lib/appenders/stdout');
import * as Configuration from 'log4js/lib/configuration';
Configuration.prototype.loadAppenderModule = function(type) {
return stdout;
};
```
| # Working with webpack
Log4js uses dynamic require for loading appenders. Webpack doesn't know at build time which appender will be used at runtime so a small workaround is necessary.
```
const stdout = require('log4js/lib/appenders/stdout');
import * as Configuration from 'log4js/lib/configuration';
Configuration.prototype.loadAppenderModule = function(type) {
return stdout;
};
```
| -1 |
log4js-node/log4js-node | 1,247 | fix: fallback for logger.log outputs nothing | Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| lamweili | 2022-05-19T17:12:01Z | 2022-05-19T17:19:20Z | b81c08b1f9e8d6fe489e6c424a11343910bfdc1c | 47ecd68ce5b1988870a4b613acb3c66f37caa254 | fix: fallback for logger.log outputs nothing. Related to #1062 (which fixes #1042)
Before Patch:

After Patch:

| ./.git/hooks/prepare-commit-msg.sample | #!/bin/sh
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source. The hook's purpose is to edit the commit
# message file. If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".
# This hook includes three examples. The first one removes the
# "# Please enter the commit message..." help message.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output. It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited. This is rarely a good idea.
COMMIT_MSG_FILE=$1
COMMIT_SOURCE=$2
SHA1=$3
/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
# case "$COMMIT_SOURCE,$SHA1" in
# ,|template,)
# /usr/bin/perl -i.bak -pe '
# print "\n" . `git diff --cached --name-status -r`
# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
# *) ;;
# esac
# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
# if test -z "$COMMIT_SOURCE"
# then
# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
# fi
| #!/bin/sh
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source. The hook's purpose is to edit the commit
# message file. If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".
# This hook includes three examples. The first one removes the
# "# Please enter the commit message..." help message.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output. It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited. This is rarely a good idea.
COMMIT_MSG_FILE=$1
COMMIT_SOURCE=$2
SHA1=$3
/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
# case "$COMMIT_SOURCE,$SHA1" in
# ,|template,)
# /usr/bin/perl -i.bak -pe '
# print "\n" . `git diff --cached --name-status -r`
# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
# *) ;;
# esac
# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
# if test -z "$COMMIT_SOURCE"
# then
# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
# fi
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/connect-logger.md | # Connect / Express Logger
The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`.
```javascript
var log4js = require('log4js');
var express = require('express');
log4js.configure({
appenders: {
console: { type: 'console' },
file: { type: 'file', filename: 'cheese.log' }
},
categories: {
cheese: { appenders: ['file'], level: 'info' },
default: { appenders: ['console'], level: 'info' }
}
});
var logger = log4js.getLogger('cheese');
var app = express();
app.use(log4js.connectLogger(logger, { level: 'info' }));
app.get('/', function(req,res) {
res.send('hello world');
});
app.listen(5000);
```
The log4js.connectLogger supports the passing of an options object that can be used to set the following:
- log level
- log format string or function (the same as the connect/express logger)
- nolog expressions (represented as a string, regexp, or array)
- status code rulesets
For example:
```javascript
app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO, format: ':method :url' }));
```
or:
```javascript
app.use(log4js.connectLogger(logger, {
level: 'auto',
// include the Express request ID in the logs
format: (req, res, format) => format(`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`)
}));
```
When you request of POST, you want to log the request body parameter like JSON.
The log format function is very useful.
Please use log format function instead “tokens” property for use express's request or response.
```javascript
app.use(log4js.connectLogger(logger, {
level: 'info',
format: (req, res, format) => format(`:remote-addr :method :url ${JSON.stringify(req.body)}`)
}));
```
Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x.
* http responses 3xx, level = WARN
* http responses 4xx & 5xx, level = ERROR
* else, level = INFO
```javascript
app.use(log4js.connectLogger(logger, { level: 'auto' }));
```
The levels of returned status codes can be configured via status code rulesets.
```javascript
app.use(log4js.connectLogger(logger, { level: 'auto', statusRules: [
{ from: 200, to: 299, level: 'debug' },
{ codes: [303, 304], level: 'info' }
]}));
```
The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, or array to omit certain log messages. Example of 1.2 below.
```javascript
app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url', nolog: '\\.gif|\\.jpg$' }));
```
The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`.
Application can use it in layouts or appenders.
In application:
```javascript
app.use(log4js.connectLogger(logger, { context: true }));
```
In layout:
```javascript
log4js.addLayout('customLayout', () => {
return (loggingEvent) => {
const res = loggingEvent.context.res;
return util.format(...loggingEvent.data, res ? `status: ${res.statusCode}` : '');
};
});
```
## Example nolog values
| nolog value | Will Not Log | Will Log |
|-------------|--------------|----------|
| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif |
| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge |
| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 |
| `/\.(gif\|jpe?g\|png)$/` | as above | as above |
| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` |
| # Connect / Express Logger
The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`.
```javascript
var log4js = require('log4js');
var express = require('express');
log4js.configure({
appenders: {
console: { type: 'console' },
file: { type: 'file', filename: 'cheese.log' }
},
categories: {
cheese: { appenders: ['file'], level: 'info' },
default: { appenders: ['console'], level: 'info' }
}
});
var logger = log4js.getLogger('cheese');
var app = express();
app.use(log4js.connectLogger(logger, { level: 'info' }));
app.get('/', function(req,res) {
res.send('hello world');
});
app.listen(5000);
```
The log4js.connectLogger supports the passing of an options object that can be used to set the following:
- log level
- log format string or function (the same as the connect/express logger)
- nolog expressions (represented as a string, regexp, or array)
- status code rulesets
For example:
```javascript
app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO, format: ':method :url' }));
```
or:
```javascript
app.use(log4js.connectLogger(logger, {
level: 'auto',
// include the Express request ID in the logs
format: (req, res, format) => format(`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`)
}));
```
When you request of POST, you want to log the request body parameter like JSON.
The log format function is very useful.
Please use log format function instead "tokens" property for use express's request or response.
```javascript
app.use(log4js.connectLogger(logger, {
level: 'info',
format: (req, res, format) => format(`:remote-addr :method :url ${JSON.stringify(req.body)}`)
}));
```
Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x.
* http responses 3xx, level = WARN
* http responses 4xx & 5xx, level = ERROR
* else, level = INFO
```javascript
app.use(log4js.connectLogger(logger, { level: 'auto' }));
```
The levels of returned status codes can be configured via status code rulesets.
```javascript
app.use(log4js.connectLogger(logger, { level: 'auto', statusRules: [
{ from: 200, to: 299, level: 'debug' },
{ codes: [303, 304], level: 'info' }
]}));
```
The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, or array to omit certain log messages. Example of 1.2 below.
```javascript
app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url', nolog: '\\.gif|\\.jpg$' }));
```
The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`.
Application can use it in layouts or appenders.
In application:
```javascript
app.use(log4js.connectLogger(logger, { context: true }));
```
In layout:
```javascript
log4js.addLayout('customLayout', () => {
return (loggingEvent) => {
const res = loggingEvent.context.res;
return util.format(...loggingEvent.data, res ? `status: ${res.statusCode}` : '');
};
});
```
## Example nolog values
| nolog value | Will Not Log | Will Log |
|-------------|--------------|----------|
| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif |
| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge |
| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 |
| `/\.(gif\|jpe?g\|png)$/` | as above | as above |
| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` |
| 1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/file.md | # File Appender
The file appender writes log events to a file. It supports an optional maximum file size, and will keep a configurable number of backups. When using the file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
* `type` - `"file"`
* `filename` - `string` - the path of the file where you want your logs written.
* `maxLogSize` - `integer` (optional, defaults to MAX_SAFE_INTEGER) - the maximum size (in bytes) for the log file.
* `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling.
* `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
* `encoding` - `string` (default "utf-8")
* `mode`- `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
* `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
* `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
* `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
* `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [dateFile appender](dateFile.md) as well. So you could roll files by both date and size.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
```
This example will result in a single log file (`all-the-logs.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log', maxLogSize: 10485760, backups: 3, compress: true }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug'}
}
});
```
This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on.
## Memory usage
If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: 'file', filename: 'out.log' }
},
categories: { default: { appenders: ['output'], level: 'debug'}}
});
let paused = false;
process.on("log4js:pause", (value) => paused = value);
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| # File Appender
The file appender writes log events to a file. It supports an optional maximum file size, and will keep a configurable number of backups. When using the file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
* `type` - `"file"`
* `filename` - `string` - the path of the file where you want your logs written.
* `maxLogSize` - `integer` (optional, defaults to MAX_SAFE_INTEGER) - the maximum size (in bytes) for the log file.
* `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling (excluding the hot file).
* `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
* `encoding` - `string` (default "utf-8")
* `mode`- `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
* `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
* `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
* `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
* `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [dateFile appender](dateFile.md) as well. So you could roll files by both date and size.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
```
This example will result in a single log file (`all-the-logs.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log', maxLogSize: 10485760, backups: 3, compress: true }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug'}
}
});
```
This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on.
## Memory usage
If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: 'file', filename: 'out.log' }
},
categories: { default: { appenders: ['output'], level: 'debug'}}
});
let paused = false;
process.on("log4js:pause", (value) => paused = value);
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| 1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/fileSync.md | # Synchronous File Appender
The sync file appender writes log events to a file, the only difference to the normal file appender is that all the writes are synchronous. This can make writing tests easier, or in situations where you need an absolute guarantee that a log message has been written to the file. Making synchronous I/O calls does mean you lose a lot of the benefits of using node.js though. It supports an optional maximum file size, and will keep a configurable number of backups. Note that the synchronous file appender, unlike the asynchronous version, does not support compressing the backup files.
## Configuration
* `type` - `"fileSync"`
* `filename` - `string` - the path of the file where you want your logs written.
* `maxLogSize` - `integer` (optional) - the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
* `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling.
* `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying node.js core stream implementation:
* `encoding` - `string` (default "utf-8")
* `mode`- `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
* `flags` - `string` (default 'a')
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'fileSync', filename: 'all-the-logs.log' }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
```
This example will result in a single log file (`all-the-logs.log`) containing the log messages.
## Example with log rolling
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log', maxLogSize: 10458760, backups: 3 }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug'}
}
});
```
This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on.
| # Synchronous File Appender
The sync file appender writes log events to a file, the only difference to the normal file appender is that all the writes are synchronous. This can make writing tests easier, or in situations where you need an absolute guarantee that a log message has been written to the file. Making synchronous I/O calls does mean you lose a lot of the benefits of using node.js though. It supports an optional maximum file size, and will keep a configurable number of backups. Note that the synchronous file appender, unlike the asynchronous version, does not support compressing the backup files.
## Configuration
* `type` - `"fileSync"`
* `filename` - `string` - the path of the file where you want your logs written.
* `maxLogSize` - `integer` (optional) - the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
* `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling (excluding the hot file).
* `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying node.js core stream implementation:
* `encoding` - `string` (default "utf-8")
* `mode`- `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
* `flags` - `string` (default 'a')
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'fileSync', filename: 'all-the-logs.log' }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
```
This example will result in a single log file (`all-the-logs.log`) containing the log messages.
## Example with log rolling
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log', maxLogSize: 10458760, backups: 3 }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug'}
}
});
```
This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on.
| 1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./types/log4js.d.ts | // Type definitions for log4js
type Format = string | ((req: any, res: any, formatter: ((str: string) => string)) => string);
export interface Log4js {
getLogger(category?: string): Logger;
configure(filename: string): Log4js;
configure(config: Configuration): Log4js;
addLayout(name: string, config: (a: any) => (logEvent: LoggingEvent) => string): void;
connectLogger(logger: Logger, options: { format?: Format; level?: string; nolog?: any; }): any; // express.Handler;
levels: Levels;
shutdown(cb: (error: Error) => void): void | null;
}
export function getLogger(category?: string): Logger;
export function configure(filename: string): Log4js;
export function configure(config: Configuration): Log4js;
export function addLayout(name: string, config: (a: any) => (logEvent: LoggingEvent) => any): void;
export function connectLogger(logger: Logger, options: { format?: Format; level?: string; nolog?: any; statusRules?: any[], context?: boolean }): any; // express.Handler;
export function recording(): Recording;
export const levels: Levels;
export function shutdown(cb?: (error: Error) => void): void | null;
export interface BaseLayout {
type: 'basic';
}
export interface ColoredLayout {
type: 'colored' | 'coloured';
}
export interface MessagePassThroughLayout {
type: 'messagePassThrough';
}
export interface DummyLayout {
type: 'dummy';
}
export interface Level {
isEqualTo(other: string): boolean;
isEqualTo(otherLevel: Level): boolean;
isLessThanOrEqualTo(other: string): boolean;
isLessThanOrEqualTo(otherLevel: Level): boolean;
isGreaterThanOrEqualTo(other: string): boolean;
isGreaterThanOrEqualTo(otherLevel: Level): boolean;
colour: string;
level: number;
levelStr: string;
}
export interface LoggingEvent {
categoryName: string; // name of category
level: Level; // level of message
data: any[]; // objects to log
startTime: Date;
pid: number;
context: any;
cluster?: {
workerId: number;
worker: number;
};
functionName?: string;
fileName?: string;
lineNumber?: number;
columnNumber?: number;
callStack?: string;
}
export type Token = ((logEvent: LoggingEvent) => string) | string;
export interface PatternLayout {
type: 'pattern';
// specifier for the output format, using placeholders as described below
pattern: string;
// user-defined tokens to be used in the pattern
tokens?: { [name: string]: Token };
}
export interface CustomLayout {
[key: string]: any;
type: string;
}
export type Layout = BaseLayout | ColoredLayout | MessagePassThroughLayout | DummyLayout | PatternLayout | CustomLayout;
/**
* Category Filter
*
* @see https://log4js-node.github.io/log4js-node/categoryFilter.html
*/
export interface CategoryFilterAppender {
type: "categoryFilter";
// the category (or categories if you provide an array of values) that will be excluded from the appender.
exclude?: string | string[];
// the name of the appender to filter. see https://log4js-node.github.io/log4js-node/layouts.html
appender?: string;
}
/**
* No Log Filter
*
* @see https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
export interface NoLogFilterAppender {
type: "noLogFilter";
// the regular expression (or the regular expressions if you provide an array of values)
// will be used for evaluating the events to pass to the appender.
// The events, which will match the regular expression, will be excluded and so not logged.
exclude: string | string[];
// the name of an appender, defined in the same configuration, that you want to filter.
appender: string;
}
/**
* Console Appender
*
* @see https://log4js-node.github.io/log4js-node/console.html
*/
export interface ConsoleAppender {
type: 'console';
// defaults to colouredLayout
layout?: Layout;
}
export interface FileAppender {
type: 'file';
// the path of the file where you want your logs written.
filename: string;
// the maximum size (in bytes) for the log file. If not specified, then no log rolling will happen.
maxLogSize?: number | string;
// (default value = 5) - the number of old log files to keep during log rolling.
backups?: number;
// defaults to basic layout
layout?: Layout;
compress?: boolean; // compress the backups
// keep the file extension when rotating logs
keepFileExt?: boolean;
encoding?: string;
mode?: number;
flags?: string;
}
export interface SyncfileAppender {
type: 'fileSync';
// the path of the file where you want your logs written.
filename: string;
// the maximum size (in bytes) for the log file. If not specified, then no log rolling will happen.
maxLogSize?: number | string;
// (default value = 5) - the number of old log files to keep during log rolling.
backups?: number;
// defaults to basic layout
layout?: Layout;
encoding?: string;
mode?: number;
flags?: string;
}
export interface DateFileAppender {
type: 'dateFile';
// the path of the file where you want your logs written.
filename: string;
// defaults to basic layout
layout?: Layout;
// defaults to .yyyy-MM-dd - the pattern to use to determine when to roll the logs.
/**
* The following strings are recognised in the pattern:
* - yyyy : the full year, use yy for just the last two digits
* - MM : the month
* - dd : the day of the month
* - hh : the hour of the day (24-hour clock)
* - mm : the minute of the hour
* - ss : seconds
* - SSS : milliseconds (although I'm not sure you'd want to roll your logs every millisecond)
* - O : timezone (capital letter o)
*/
pattern?: string;
// default “utf-8”
encoding?: string;
// default 0600
mode?: number;
// default ‘a’
flags?: string;
// compress the backup files during rolling (backup files will have .gz extension)(default false)
compress?: boolean;
// include the pattern in the name of the current log file as well as the backups.(default false)
alwaysIncludePattern?: boolean;
// keep the file extension when rotating logs
keepFileExt?: boolean;
// if this value is greater than zero, then files older than that many days will be deleted during log rolling.(default 0)
numBackups?: number;
}
export interface LogLevelFilterAppender {
type: 'logLevelFilter';
// the name of an appender, defined in the same configuration, that you want to filter
appender: string;
// the minimum level of event to allow through the filter
level: string;
// (defaults to FATAL) - the maximum level of event to allow through the filter
maxLevel?: string;
}
export interface MultiFileAppender {
type: 'multiFile';
// the base part of the generated log filename
base: string;
// the value to use to split files (see below).
property: string;
// the suffix for the generated log filename.
extension: string;
}
export interface MultiprocessAppender {
type: 'multiprocess';
// controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
mode: 'master' | 'worker';
// (only needed if mode == master)- the name of the appender to send the log events to
appender?: string;
// (defaults to 5000) - the port to listen on, or send to
loggerPort?: number;
// (defaults to localhost) - the host/IP address to listen on, or send to
loggerHost?: string;
}
export interface RecordingAppender {
type: 'recording';
}
export interface StandardErrorAppender {
type: 'stderr';
// (defaults to colouredLayout)
layout?: Layout;
}
export interface StandardOutputAppender {
type: 'stdout';
// (defaults to colouredLayout)
layout?: Layout;
}
/**
* TCP Appender
*
* @see https://log4js-node.github.io/log4js-node/tcp.html
*/
export interface TCPAppender {
type: 'tcp';
// defaults to 5000
port?: number
// defaults to localhost
host?: string
// default to __LOG4JS__
endMsg?: string
// defaults to a serialized log event
layout?: Layout;
}
export interface CustomAppender {
type: string | AppenderModule;
[key: string]: any;
}
export interface AppenderModule {
configure: (config: Config, layouts: LayoutsParam) => AppenderFunction;
}
export type AppenderFunction = (loggingEvent: LoggingEvent) => void;
// TODO: Actually add types here...
// It's supposed to be the full config element
export type Config = any
export interface LayoutsParam {
basicLayout: LayoutFunction;
messagePassThroughLayout: LayoutFunction;
patternLayout: LayoutFunction;
colouredLayout: LayoutFunction;
coloredLayout: LayoutFunction;
dummyLayout: LayoutFunction;
addLayout: (name: string, serializerGenerator: LayoutFunction) => void;
layout: (name: string, config: PatternToken) => LayoutFunction;
}
export interface PatternToken {
pattern: string; // TODO type this to enforce good pattern...
tokens: { [tokenName: string]: () => any };
}
export type LayoutFunction = (loggingEvent: LoggingEvent) => string;
export type Appender = CategoryFilterAppender
| ConsoleAppender
| FileAppender
| SyncfileAppender
| DateFileAppender
| LogLevelFilterAppender
| NoLogFilterAppender
| MultiFileAppender
| MultiprocessAppender
| RecordingAppender
| StandardErrorAppender
| StandardOutputAppender
| TCPAppender
| CustomAppender;
export interface Levels {
ALL: Level;
MARK: Level;
TRACE: Level;
DEBUG: Level;
INFO: Level;
WARN: Level;
ERROR: Level;
FATAL: Level;
OFF: Level;
levels: Level[];
getLevel(level: Level | string, defaultLevel?: Level): Level;
addLevels(customLevels: object): void;
}
export interface Configuration {
appenders: { [name: string]: Appender; };
categories: { [name: string]: { appenders: string[]; level: string; enableCallStack?: boolean; } };
pm2?: boolean;
pm2InstanceVar?: string;
levels?: Levels;
disableClustering?: boolean;
}
export interface Recording {
configure(loggingEvent: LoggingEvent): void
replay(): LoggingEvent[]
playback(): LoggingEvent[]
reset(): void
erase(): void
}
export class Logger {
constructor(name: string);
readonly category: string;
level: Level | string;
log(level: Level | string, ...args: any[]): void;
isLevelEnabled(level?: string): boolean;
isTraceEnabled(): boolean;
isDebugEnabled(): boolean;
isInfoEnabled(): boolean;
isWarnEnabled(): boolean;
isErrorEnabled(): boolean;
isFatalEnabled(): boolean;
_log(level: Level, data: any): void;
addContext(key: string, value: any): void;
removeContext(key: string): void;
clearContext(): void;
setParseCallStackFunction(parseFunction: Function): void;
trace(message: any, ...args: any[]): void;
debug(message: any, ...args: any[]): void;
info(message: any, ...args: any[]): void;
warn(message: any, ...args: any[]): void;
error(message: any, ...args: any[]): void;
fatal(message: any, ...args: any[]): void;
mark(message: any, ...args: any[]): void;
}
| // Type definitions for log4js
type Format = string | ((req: any, res: any, formatter: ((str: string) => string)) => string);
export interface Log4js {
getLogger(category?: string): Logger;
configure(filename: string): Log4js;
configure(config: Configuration): Log4js;
addLayout(name: string, config: (a: any) => (logEvent: LoggingEvent) => string): void;
connectLogger(logger: Logger, options: { format?: Format; level?: string; nolog?: any; }): any; // express.Handler;
levels: Levels;
shutdown(cb: (error: Error) => void): void | null;
}
export function getLogger(category?: string): Logger;
export function configure(filename: string): Log4js;
export function configure(config: Configuration): Log4js;
export function addLayout(name: string, config: (a: any) => (logEvent: LoggingEvent) => any): void;
export function connectLogger(logger: Logger, options: { format?: Format; level?: string; nolog?: any; statusRules?: any[], context?: boolean }): any; // express.Handler;
export function recording(): Recording;
export const levels: Levels;
export function shutdown(cb?: (error: Error) => void): void | null;
export interface BasicLayout {
type: 'basic';
}
export interface ColoredLayout {
type: 'colored' | 'coloured';
}
export interface MessagePassThroughLayout {
type: 'messagePassThrough';
}
export interface DummyLayout {
type: 'dummy';
}
export interface Level {
isEqualTo(other: string): boolean;
isEqualTo(otherLevel: Level): boolean;
isLessThanOrEqualTo(other: string): boolean;
isLessThanOrEqualTo(otherLevel: Level): boolean;
isGreaterThanOrEqualTo(other: string): boolean;
isGreaterThanOrEqualTo(otherLevel: Level): boolean;
colour: string;
level: number;
levelStr: string;
}
export interface LoggingEvent {
categoryName: string; // name of category
level: Level; // level of message
data: any[]; // objects to log
startTime: Date;
pid: number;
context: any;
cluster?: {
workerId: number;
worker: number;
};
functionName?: string;
fileName?: string;
lineNumber?: number;
columnNumber?: number;
callStack?: string;
}
export type Token = ((logEvent: LoggingEvent) => string) | string;
export interface PatternLayout {
type: 'pattern';
// specifier for the output format, using placeholders as described below
pattern: string;
// user-defined tokens to be used in the pattern
tokens?: { [name: string]: Token };
}
export interface CustomLayout {
[key: string]: any;
type: string;
}
export type Layout = BasicLayout | ColoredLayout | MessagePassThroughLayout | DummyLayout | PatternLayout | CustomLayout;
/**
* Category Filter
*
* @see https://log4js-node.github.io/log4js-node/categoryFilter.html
*/
export interface CategoryFilterAppender {
type: "categoryFilter";
// the category (or categories if you provide an array of values) that will be excluded from the appender.
exclude?: string | string[];
// the name of the appender to filter. see https://log4js-node.github.io/log4js-node/layouts.html
appender?: string;
}
/**
* No Log Filter
*
* @see https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
export interface NoLogFilterAppender {
type: "noLogFilter";
// the regular expression (or the regular expressions if you provide an array of values)
// will be used for evaluating the events to pass to the appender.
// The events, which will match the regular expression, will be excluded and so not logged.
exclude: string | string[];
// the name of an appender, defined in the same configuration, that you want to filter.
appender: string;
}
/**
* Console Appender
*
* @see https://log4js-node.github.io/log4js-node/console.html
*/
export interface ConsoleAppender {
type: 'console';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface FileAppender {
type: 'file';
// the path of the file where you want your logs written.
filename: string;
// (defaults to MAX_SAFE_INTEGER) the maximum size (in bytes) for the log file.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
keepFileExt?: boolean;
}
export interface SyncfileAppender {
type: 'fileSync';
// the path of the file where you want your logs written.
filename: string;
// (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
}
export interface DateFileAppender {
type: 'dateFile';
// the path of the file where you want your logs written.
filename: string;
// (defaults to yyyy-MM-dd) the pattern to use to determine when to roll the logs.
/**
* The following strings are recognised in the pattern:
* - yyyy : the full year, use yy for just the last two digits
* - MM : the month
* - dd : the day of the month
* - hh : the hour of the day (24-hour clock)
* - mm : the minute of the hour
* - ss : seconds
* - SSS : milliseconds (although I'm not sure you'd want to roll your logs every millisecond)
* - O : timezone (capital letter o)
*/
pattern?: string;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
keepFileExt?: boolean;
// (defaults to false) include the pattern in the name of the current log file.
alwaysIncludePattern?: boolean;
// (defaults to 1) the number of old files that matches the pattern to keep (excluding the hot file).
numBackups?: number;
}
export interface LogLevelFilterAppender {
type: 'logLevelFilter';
// the name of an appender, defined in the same configuration, that you want to filter
appender: string;
// the minimum level of event to allow through the filter
level: string;
// (defaults to FATAL) the maximum level of event to allow through the filter
maxLevel?: string;
}
export interface MultiFileAppender {
type: 'multiFile';
// the base part of the generated log filename
base: string;
// the value to use to split files (see below).
property: string;
// the suffix for the generated log filename.
extension: string;
}
export interface MultiprocessAppender {
type: 'multiprocess';
// controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
mode: 'master' | 'worker';
// (only needed if mode == master) the name of the appender to send the log events to
appender?: string;
// (defaults to 5000) the port to listen on, or send to
loggerPort?: number;
// (defaults to localhost) the host/IP address to listen on, or send to
loggerHost?: string;
}
export interface RecordingAppender {
type: 'recording';
}
export interface StandardErrorAppender {
type: 'stderr';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface StandardOutputAppender {
type: 'stdout';
// (defaults to ColoredLayout)
layout?: Layout;
}
/**
* TCP Appender
*
* @see https://log4js-node.github.io/log4js-node/tcp.html
*/
export interface TCPAppender {
type: 'tcp';
// (defaults to 5000)
port?: number
// (defaults to localhost)
host?: string
// (defaults to __LOG4JS__)
endMsg?: string
// (defaults to a serialized log event)
layout?: Layout;
}
export interface CustomAppender {
type: string | AppenderModule;
[key: string]: any;
}
export interface AppenderModule {
configure: (config: Config, layouts: LayoutsParam) => AppenderFunction;
}
export type AppenderFunction = (loggingEvent: LoggingEvent) => void;
// TODO: Actually add types here...
// It's supposed to be the full config element
export type Config = any
export interface LayoutsParam {
basicLayout: LayoutFunction;
messagePassThroughLayout: LayoutFunction;
patternLayout: LayoutFunction;
colouredLayout: LayoutFunction;
coloredLayout: LayoutFunction;
dummyLayout: LayoutFunction;
addLayout: (name: string, serializerGenerator: LayoutFunction) => void;
layout: (name: string, config: PatternToken) => LayoutFunction;
}
export interface PatternToken {
pattern: string; // TODO type this to enforce good pattern...
tokens: { [tokenName: string]: () => any };
}
export type LayoutFunction = (loggingEvent: LoggingEvent) => string;
export type Appender = CategoryFilterAppender
| ConsoleAppender
| FileAppender
| SyncfileAppender
| DateFileAppender
| LogLevelFilterAppender
| NoLogFilterAppender
| MultiFileAppender
| MultiprocessAppender
| RecordingAppender
| StandardErrorAppender
| StandardOutputAppender
| TCPAppender
| CustomAppender;
export interface Levels {
ALL: Level;
MARK: Level;
TRACE: Level;
DEBUG: Level;
INFO: Level;
WARN: Level;
ERROR: Level;
FATAL: Level;
OFF: Level;
levels: Level[];
getLevel(level: Level | string, defaultLevel?: Level): Level;
addLevels(customLevels: object): void;
}
export interface Configuration {
appenders: { [name: string]: Appender; };
categories: { [name: string]: { appenders: string[]; level: string; enableCallStack?: boolean; } };
pm2?: boolean;
pm2InstanceVar?: string;
levels?: Levels;
disableClustering?: boolean;
}
export interface Recording {
configure(loggingEvent: LoggingEvent): void
replay(): LoggingEvent[]
playback(): LoggingEvent[]
reset(): void
erase(): void
}
export class Logger {
constructor(name: string);
readonly category: string;
level: Level | string;
log(level: Level | string, ...args: any[]): void;
isLevelEnabled(level?: string): boolean;
isTraceEnabled(): boolean;
isDebugEnabled(): boolean;
isInfoEnabled(): boolean;
isWarnEnabled(): boolean;
isErrorEnabled(): boolean;
isFatalEnabled(): boolean;
_log(level: Level, data: any): void;
addContext(key: string, value: any): void;
removeContext(key: string): void;
clearContext(): void;
setParseCallStackFunction(parseFunction: Function): void;
trace(message: any, ...args: any[]): void;
debug(message: any, ...args: any[]): void;
info(message: any, ...args: any[]): void;
warn(message: any, ...args: any[]): void;
error(message: any, ...args: any[]): void;
fatal(message: any, ...args: any[]): void;
mark(message: any, ...args: any[]): void;
}
| 1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/index.md | # log4js-node
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I started out just stripping out the browser-specific code and tidying up some of the javascript to work better in node. It grew from there. Although it's got a similar name to the Java library [log4j](https://logging.apache.org/log4j/2.x/), thinking that it will behave the same way will only bring you sorrow and confusion.
[Changes in version 3.x](v3-changes.md)
## Migrating from log4js < v2.x?
There have been a few changes between log4js 1.x and 2.x (and 0.x too). You should probably read this [migration guide](migration-guide.md) if things aren't working.
## Features
- coloured console logging to [stdout](stdout.md) or [stderr](stderr.md)
- [file appender](file.md), with configurable log rolling based on file size or [date](dateFile.md)
- [SMTP appender](https://github.com/log4js-node/smtp)
- [GELF appender](https://github.com/log4js-node/gelf)
- [Loggly appender](https://github.com/log4js-node/loggly)
- [Logstash UDP appender](https://github.com/log4js-node/logstashUDP)
- logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender
- [TCP appender](tcp.md) (useful when you've got multiple servers but want to centralise logging)
- a [logger for connect/express](connect-logger.md) servers
- configurable log message [layout/patterns](layouts.md)
- different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
- built-in support for logging with node core's `cluster` module
- third-party [InfluxDB appender](https://github.com/rnd-debug/log4js-influxdb-appender)
## Installation
```bash
npm install log4js
```
## Usage
Minimalist version:
```javascript
var log4js = require("log4js");
var logger = log4js.getLogger();
logger.level = "debug"; // default level is OFF - which means no logs at all.
logger.debug("Some debug messages");
```
## Clustering
If you use node's cluster, or passenger, or pm2, then you should read this [clustering guide](clustering.md)
## Note for library makers
If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api).
## License
The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
keep the original copyright and author credits in place, except in sections that I have rewritten
extensively.
| # log4js-node
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I started out just stripping out the browser-specific code and tidying up some of the javascript to work better in node. It grew from there. Although it's got a similar name to the Java library [log4j](https://logging.apache.org/log4j/2.x/), thinking that it will behave the same way will only bring you sorrow and confusion.
[Changes in version 3.x](v3-changes.md)
## Migrating from log4js < v2.x?
There have been a few changes between log4js 1.x and 2.x (and 0.x too). You should probably read this [migration guide](migration-guide.md) if things aren't working.
## Features
- coloured console logging to [stdout](stdout.md) or [stderr](stderr.md)
- [file appender](file.md), with configurable log rolling based on file size or [date](dateFile.md)
- [SMTP appender](https://github.com/log4js-node/smtp)
- [GELF appender](https://github.com/log4js-node/gelf)
- [Loggly appender](https://github.com/log4js-node/loggly)
- [Logstash UDP appender](https://github.com/log4js-node/logstashUDP)
- logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender
- [TCP appender](tcp.md) (useful when you've got multiple servers but want to centralise logging)
- a [logger for connect/express](connect-logger.md) servers
- configurable log message [layout/patterns](layouts.md)
- different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
- built-in support for logging with node core's `cluster` module
- third-party [InfluxDB appender](https://github.com/rnd-debug/log4js-influxdb-appender)
## Installation
```bash
npm install log4js
```
## Usage
Minimalist version:
```javascript
var log4js = require("log4js");
var logger = log4js.getLogger();
logger.level = "debug"; // default level is OFF - which means no logs at all.
logger.debug("Some debug messages");
```
## Clustering
If you use node's cluster, or passenger, or pm2, then you should read this [clustering guide](clustering.md)
## Note for library makers
If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api).
## License
The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
keep the original copyright and author credits in place, except in sections that I have rewritten
extensively.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/webpack.md | # Working with webpack
Log4js uses dynamic require for loading appenders. Webpack doesn't know at build time which appender will be used at runtime so a small workaround is necessary.
```
const stdout = require('log4js/lib/appenders/stdout');
import * as Configuration from 'log4js/lib/configuration';
Configuration.prototype.loadAppenderModule = function(type) {
return stdout;
};
```
| # Working with webpack
Log4js uses dynamic require for loading appenders. Webpack doesn't know at build time which appender will be used at runtime so a small workaround is necessary.
```
const stdout = require('log4js/lib/appenders/stdout');
import * as Configuration from 'log4js/lib/configuration';
Configuration.prototype.loadAppenderModule = function(type) {
return stdout;
};
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/dateFile.md | # Date Rolling File Appender
This is a file appender that rolls log files based on a configurable time, rather than the file size. When using the date file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the date file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
* `type` - `"dateFile"`
* `filename` - `string` - the path of the file where you want your logs written.
* `pattern` - `string` (optional, defaults to `yyyy-MM-dd`) - the pattern to use to determine when to roll the logs.
* `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
* `encoding` - `string` (default "utf-8")
* `mode`- `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
* `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
* `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
* `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
* `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
* `alwaysIncludePattern` - `boolean` (default false) - include the pattern in the name of the current log file.
* `numBackups` - `integer` (default 1) - the number of old files that matches the pattern to keep (excluding the hot file).
The `pattern` is used to determine when the current log file should be renamed and a new log file created. For example, with a filename of 'cheese.log', and the default pattern of `.yyyy-MM-dd` - on startup this will result in a file called `cheese.log` being created and written to until the next write after midnight. When this happens, `cheese.log` will be renamed to `cheese.log.2017-04-30` and a new `cheese.log` file created. The appender uses the [date-format](https://github.com/nomiddlename/date-format) library to parse the `pattern`, and any of the valid formats can be used. Also note that there is no timer controlling the log rolling - changes in the pattern are determined on every log write. If no writes occur, then no log rolling will happen. If your application logs infrequently this could result in no log file being written for a particular time period.
Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [file appender](file.md) as well. So you could roll files by both date and size.
## Example (default daily log rolling)
```javascript
log4js.configure({
appenders: {
everything: { type: 'dateFile', filename: 'all-the-logs.log' }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug' }
}
});
```
This example will result in files being rolled every day. The initial file will be `all-the-logs.log`, with the daily backups being `all-the-logs.log.2017-04-30`, etc.
## Example with hourly log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: { type: 'dateFile', filename: 'all-the-logs.log', pattern: 'yyyy-MM-dd-hh', compress: true }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug'}
}
});
```
This will result in one current log file (`all-the-logs.log`). Every hour this file will be compressed and renamed to `all-the-logs.log.2017-04-30-08.gz` (for example) and a new `all-the-logs.log` created.
## Memory usage
If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: 'dateFile', filename: 'out.log' }
},
categories: { default: { appenders: ['output'], level: 'debug'}}
});
let paused = false;
process.on("log4js:pause", (value) => paused = value);
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| # Date Rolling File Appender
This is a file appender that rolls log files based on a configurable time, rather than the file size. When using the date file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the date file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
* `type` - `"dateFile"`
* `filename` - `string` - the path of the file where you want your logs written.
* `pattern` - `string` (optional, defaults to `yyyy-MM-dd`) - the pattern to use to determine when to roll the logs.
* `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
* `encoding` - `string` (default "utf-8")
* `mode`- `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
* `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
* `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
* `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
* `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
* `alwaysIncludePattern` - `boolean` (default false) - include the pattern in the name of the current log file.
* `numBackups` - `integer` (default 1) - the number of old files that matches the pattern to keep (excluding the hot file).
The `pattern` is used to determine when the current log file should be renamed and a new log file created. For example, with a filename of 'cheese.log', and the default pattern of `.yyyy-MM-dd` - on startup this will result in a file called `cheese.log` being created and written to until the next write after midnight. When this happens, `cheese.log` will be renamed to `cheese.log.2017-04-30` and a new `cheese.log` file created. The appender uses the [date-format](https://github.com/nomiddlename/date-format) library to parse the `pattern`, and any of the valid formats can be used. Also note that there is no timer controlling the log rolling - changes in the pattern are determined on every log write. If no writes occur, then no log rolling will happen. If your application logs infrequently this could result in no log file being written for a particular time period.
Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [file appender](file.md) as well. So you could roll files by both date and size.
## Example (default daily log rolling)
```javascript
log4js.configure({
appenders: {
everything: { type: 'dateFile', filename: 'all-the-logs.log' }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug' }
}
});
```
This example will result in files being rolled every day. The initial file will be `all-the-logs.log`, with the daily backups being `all-the-logs.log.2017-04-30`, etc.
## Example with hourly log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: { type: 'dateFile', filename: 'all-the-logs.log', pattern: 'yyyy-MM-dd-hh', compress: true }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug'}
}
});
```
This will result in one current log file (`all-the-logs.log`). Every hour this file will be compressed and renamed to `all-the-logs.log.2017-04-30-08.gz` (for example) and a new `all-the-logs.log` created.
## Memory usage
If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: 'dateFile', filename: 'out.log' }
},
categories: { default: { appenders: ['output'], level: 'debug'}}
});
let paused = false;
process.on("log4js:pause", (value) => paused = value);
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/categories.md | # Categories
Categories are groups of log events. The category for log events is defined when you get a _Logger_ from log4js (`log4js.getLogger('somecategory')`). Log events with the same _category_ will go to the same _appenders_.
## Default configuration
When defining your appenders through a configuration, at least one category must be defined.
```javascript
const log4js = require('log4js');
log4js.configure({
appenders: {
out: { type: 'stdout' },
app: { type: 'file', filename: 'application.log' }
},
categories: {
default: { appenders: [ 'out' ], level: 'trace' },
app: { appenders: ['app'], level: 'trace' }
}
});
const logger = log4js.getLogger();
logger.trace('This will use the default category and go to stdout');
const logToFile = log4js.getLogger('app');
logToFile.trace('This will go to a file');
```
## Categories inheritance
Log4js supports a hierarchy for categories, using dots to separate layers - for example, log events in the category 'myapp.submodule' will use the level for 'myapp' if none is defined for 'myapp.submodule', and also any appenders defined for 'myapp'.
This behaviour can be disabled by setting inherit=false on the sub-category.
```javascript
const log4js = require('log4js');
log4js.configure({
appenders: {
console: { type: 'console' },
app: { type: 'file', filename: 'application.log' }
},
categories: {
default: { appenders: [ 'console' ], level: 'trace' },
catA: { appenders: ['console'], level: 'error' },
'catA.catB': { appenders: ['app'], level: 'trace' },
}
});
const loggerA = log4js.getLogger('catA');
loggerA.error('This will be written to console with log level ERROR');
loggerA.trace('This will not be written');
const loggerAB = log4js.getLogger('catA.catB');
loggerAB.error('This will be written with log level ERROR to console and to a file');
loggerAB.trace('This will be written with log level TRACE to console and to a file');
```
Two categories are defined:
- Log events with category 'catA' will go to appender 'console' only.
- Log events with category 'catA.catB' will go to appenders 'console' and 'app'.
Appenders will see and log an event only if the category level is less than or equal to the event's level.
| # Categories
Categories are groups of log events. The category for log events is defined when you get a _Logger_ from log4js (`log4js.getLogger('somecategory')`). Log events with the same _category_ will go to the same _appenders_.
## Default configuration
When defining your appenders through a configuration, at least one category must be defined.
```javascript
const log4js = require('log4js');
log4js.configure({
appenders: {
out: { type: 'stdout' },
app: { type: 'file', filename: 'application.log' }
},
categories: {
default: { appenders: [ 'out' ], level: 'trace' },
app: { appenders: ['app'], level: 'trace' }
}
});
const logger = log4js.getLogger();
logger.trace('This will use the default category and go to stdout');
const logToFile = log4js.getLogger('app');
logToFile.trace('This will go to a file');
```
## Categories inheritance
Log4js supports a hierarchy for categories, using dots to separate layers - for example, log events in the category 'myapp.submodule' will use the level for 'myapp' if none is defined for 'myapp.submodule', and also any appenders defined for 'myapp'.
This behaviour can be disabled by setting inherit=false on the sub-category.
```javascript
const log4js = require('log4js');
log4js.configure({
appenders: {
console: { type: 'console' },
app: { type: 'file', filename: 'application.log' }
},
categories: {
default: { appenders: [ 'console' ], level: 'trace' },
catA: { appenders: ['console'], level: 'error' },
'catA.catB': { appenders: ['app'], level: 'trace' },
}
});
const loggerA = log4js.getLogger('catA');
loggerA.error('This will be written to console with log level ERROR');
loggerA.trace('This will not be written');
const loggerAB = log4js.getLogger('catA.catB');
loggerAB.error('This will be written with log level ERROR to console and to a file');
loggerAB.trace('This will be written with log level TRACE to console and to a file');
```
Two categories are defined:
- Log events with category 'catA' will go to appender 'console' only.
- Log events with category 'catA.catB' will go to appenders 'console' and 'app'.
Appenders will see and log an event only if the category level is less than or equal to the event's level.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/stderr.md | # Standard Error Appender
This appender writes all log events to the standard error stream.
# Configuration
* `type` - `stderr`
* `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
# Example
```javascript
log4js.configure({
appenders: { err: { type: 'stderr' } },
categories: { default: { appenders: ['err'], level: 'ERROR' } }
});
```
| # Standard Error Appender
This appender writes all log events to the standard error stream.
# Configuration
* `type` - `stderr`
* `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
# Example
```javascript
log4js.configure({
appenders: { err: { type: 'stderr' } },
categories: { default: { appenders: ['err'], level: 'ERROR' } }
});
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/writing-appenders.md | # Writing Appenders for Log4js
Log4js can load appenders from outside its core set. To add a custom appender, the easiest way is to make it a stand-alone module and publish to npm. You can also load appenders from your own application, but they must be defined in a module.
## Loading mechanism
When log4js parses your configuration, it loops through the defined appenders. For each one, it will `require` the appender initially using the `type` value prepended with './appenders' as the module identifier - this is to try loading from the core appenders first. If that fails (the module could not be found in the core appenders), then log4js will try to require the module using variations of the `type` value.
Log4js checks the following places (in this order) for appenders based on the type value:
1. Bundled core appenders (within appenders directory): `require('./' + type)`
2. node_modules: `require(type)`
3. relative to the main file of your application: `require(path.dirname(require.main.filename) + '/' + type)`
4. relative to the process' current working directory: `require(process.cwd() + '/' + type)`
If that fails, an error will be raised.
## Appender Modules
An appender module should export a single function called `configure`. The function should accept the following arguments:
* `config` - `object` - the appender's configuration object
* `layouts` - `module` - gives access to the [layouts](layouts.md) module, which most appenders will need
* `layout` - `function(type, config)` - this is the main function that appenders will use to find a layout
* `findAppender` - `function(name)` - if your appender is a wrapper around another appender (like the [logLevelFilter](logLevelFilter.md) for example), this function can be used to find another appender by name
* `levels` - `module` - gives access to the [levels](levels.md) module, which most appenders will need
`configure` should return a function which accepts a logEvent, which is the appender itself. One of the simplest examples is the [stdout](stdout.md) appender. Let's run through the code.
## Example
```javascript
// This is the function that generates an appender function
function stdoutAppender(layout, timezoneOffset) {
// This is the appender function itself
return (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
}
// stdout configure doesn't need to use findAppender, or levels
function configure(config, layouts) {
// the default layout for the appender
let layout = layouts.colouredLayout;
// check if there is another layout specified
if (config.layout) {
// load the layout
layout = layouts.layout(config.layout.type, config.layout);
}
//create a new appender instance
return stdoutAppender(layout, config.timezoneOffset);
}
//export the only function needed
exports.configure = configure;
```
# Shutdown functions
It's a good idea to implement a `shutdown` function on your appender instances. This function will get called by `log4js.shutdown` and signals that `log4js` has been asked to stop logging. Usually this is because of a fatal exception, or the application is being stopped. Your shutdown function should make sure that all asynchronous operations finish, and that any resources are cleaned up. The function must be named `shutdown`, take one callback argument, and be a property of the appender instance. Let's add a shutdown function to the `stdout` appender as an example.
## Example (shutdown)
```javascript
// This is the function that generates an appender function
function stdoutAppender(layout, timezoneOffset) {
// This is the appender function itself
const appender = (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
// add a shutdown function.
appender.shutdown = (done) => {
process.stdout.write('', done);
};
return appender;
}
// ... rest of the code as above
```
| # Writing Appenders for Log4js
Log4js can load appenders from outside its core set. To add a custom appender, the easiest way is to make it a stand-alone module and publish to npm. You can also load appenders from your own application, but they must be defined in a module.
## Loading mechanism
When log4js parses your configuration, it loops through the defined appenders. For each one, it will `require` the appender initially using the `type` value prepended with './appenders' as the module identifier - this is to try loading from the core appenders first. If that fails (the module could not be found in the core appenders), then log4js will try to require the module using variations of the `type` value.
Log4js checks the following places (in this order) for appenders based on the type value:
1. Bundled core appenders (within appenders directory): `require('./' + type)`
2. node_modules: `require(type)`
3. relative to the main file of your application: `require(path.dirname(require.main.filename) + '/' + type)`
4. relative to the process' current working directory: `require(process.cwd() + '/' + type)`
If that fails, an error will be raised.
## Appender Modules
An appender module should export a single function called `configure`. The function should accept the following arguments:
* `config` - `object` - the appender's configuration object
* `layouts` - `module` - gives access to the [layouts](layouts.md) module, which most appenders will need
* `layout` - `function(type, config)` - this is the main function that appenders will use to find a layout
* `findAppender` - `function(name)` - if your appender is a wrapper around another appender (like the [logLevelFilter](logLevelFilter.md) for example), this function can be used to find another appender by name
* `levels` - `module` - gives access to the [levels](levels.md) module, which most appenders will need
`configure` should return a function which accepts a logEvent, which is the appender itself. One of the simplest examples is the [stdout](stdout.md) appender. Let's run through the code.
## Example
```javascript
// This is the function that generates an appender function
function stdoutAppender(layout, timezoneOffset) {
// This is the appender function itself
return (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
}
// stdout configure doesn't need to use findAppender, or levels
function configure(config, layouts) {
// the default layout for the appender
let layout = layouts.colouredLayout;
// check if there is another layout specified
if (config.layout) {
// load the layout
layout = layouts.layout(config.layout.type, config.layout);
}
//create a new appender instance
return stdoutAppender(layout, config.timezoneOffset);
}
//export the only function needed
exports.configure = configure;
```
# Shutdown functions
It's a good idea to implement a `shutdown` function on your appender instances. This function will get called by `log4js.shutdown` and signals that `log4js` has been asked to stop logging. Usually this is because of a fatal exception, or the application is being stopped. Your shutdown function should make sure that all asynchronous operations finish, and that any resources are cleaned up. The function must be named `shutdown`, take one callback argument, and be a property of the appender instance. Let's add a shutdown function to the `stdout` appender as an example.
## Example (shutdown)
```javascript
// This is the function that generates an appender function
function stdoutAppender(layout, timezoneOffset) {
// This is the appender function itself
const appender = (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
// add a shutdown function.
appender.shutdown = (done) => {
process.stdout.write('', done);
};
return appender;
}
// ... rest of the code as above
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/logLevelFilter.md | # Log Level Filter
The log level filter allows you to restrict the log events that an appender will record based on the level of those events. This is useful when you want most logs to go to a file, but errors to be sent as emails, for example. The filter works by wrapping around another appender and controlling which events get sent to it.
## Configuration
* `type` - `logLevelFilter`
* `appender` - `string` - the name of an appender, defined in the same configuration, that you want to filter
* `level` - `string` - the minimum level of event to allow through the filter
* `maxLevel` - `string` (optional, defaults to `FATAL`) - the maximum level of event to allow through the filter
If an event's level is greater than or equal to `level` and less than or equal to `maxLevel` then it will be sent to the appender.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
emergencies: { type: 'file', filename: 'panic-now.log' },
just-errors: { type: 'logLevelFilter', appender: 'emergencies', level: 'error' }
},
categories: {
default: { appenders: ['just-errors', 'everything' ], level: 'debug' }
}
});
```
Log events of `debug`, `info`, `error`, and `fatal` will go to `all-the-logs.log`. Events of `error` and `fatal` will also go to `panic-now.log`.
| # Log Level Filter
The log level filter allows you to restrict the log events that an appender will record based on the level of those events. This is useful when you want most logs to go to a file, but errors to be sent as emails, for example. The filter works by wrapping around another appender and controlling which events get sent to it.
## Configuration
* `type` - `logLevelFilter`
* `appender` - `string` - the name of an appender, defined in the same configuration, that you want to filter
* `level` - `string` - the minimum level of event to allow through the filter
* `maxLevel` - `string` (optional, defaults to `FATAL`) - the maximum level of event to allow through the filter
If an event's level is greater than or equal to `level` and less than or equal to `maxLevel` then it will be sent to the appender.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
emergencies: { type: 'file', filename: 'panic-now.log' },
just-errors: { type: 'logLevelFilter', appender: 'emergencies', level: 'error' }
},
categories: {
default: { appenders: ['just-errors', 'everything' ], level: 'debug' }
}
});
```
Log events of `debug`, `info`, `error`, and `fatal` will go to `all-the-logs.log`. Events of `error` and `fatal` will also go to `panic-now.log`.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/categoryFilter.md | # Category Filter
This is not strictly an appender - it wraps around another appender and stops log events from specific categories from being written to that appender. This could be useful when debugging your application, but you have one component that logs noisily, or is irrelevant to your investigation.
## Configuration
* `type` - `"categoryFilter"`
* `exclude` - `string | Array<string>` - the category (or categories if you provide an array of values) that will be excluded from the appender.
* `appender` - `string` - the name of the appender to filter.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
'no-noise': { type: 'categoryFilter', exclude: 'noisy.component', appender: 'everything' }
},
categories: {
default: { appenders: [ 'no-noise' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
const noisyLogger = log4js.getLogger('noisy.component');
logger.debug('I will be logged in all-the-logs.log');
noisyLogger.debug('I will not be logged.');
```
Note that you can achieve the same outcome without using the category filter, like this:
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug' },
'noisy.component': { appenders: ['everything'], level: 'off' }
}
});
const logger = log4js.getLogger();
const noisyLogger = log4js.getLogger('noisy.component');
logger.debug('I will be logged in all-the-logs.log');
noisyLogger.debug('I will not be logged.');
```
Category filter becomes useful when you have many categories you want to exclude, passing them as an array.
| # Category Filter
This is not strictly an appender - it wraps around another appender and stops log events from specific categories from being written to that appender. This could be useful when debugging your application, but you have one component that logs noisily, or is irrelevant to your investigation.
## Configuration
* `type` - `"categoryFilter"`
* `exclude` - `string | Array<string>` - the category (or categories if you provide an array of values) that will be excluded from the appender.
* `appender` - `string` - the name of the appender to filter.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
'no-noise': { type: 'categoryFilter', exclude: 'noisy.component', appender: 'everything' }
},
categories: {
default: { appenders: [ 'no-noise' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
const noisyLogger = log4js.getLogger('noisy.component');
logger.debug('I will be logged in all-the-logs.log');
noisyLogger.debug('I will not be logged.');
```
Note that you can achieve the same outcome without using the category filter, like this:
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' }
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug' },
'noisy.component': { appenders: ['everything'], level: 'off' }
}
});
const logger = log4js.getLogger();
const noisyLogger = log4js.getLogger('noisy.component');
logger.debug('I will be logged in all-the-logs.log');
noisyLogger.debug('I will not be logged.');
```
Category filter becomes useful when you have many categories you want to exclude, passing them as an array.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/multiFile.md | # MultiFile Appender
The multiFile appender can be used to dynamically write logs to multiple files, based on a property of the logging event. Use this as a way to write separate log files for each category when the number of categories is unknown, for instance. It creates [file](file.md) appenders under the hood, so all the options that apply to that appender (apart from filename) can be used with this one, allowing the log files to be rotated and capped at a certain size.
## Configuration
* `type` - `"multiFile"`
* `base` - `string` - the base part of the generated log filename
* `property` - `string` - the value to use to split files (see below).
* `extension` - `string` - the suffix for the generated log filename.
* `timeout` - `integer` - optional activity timeout in ms after which the file will be closed.
All other properties will be passed to the created [file](file.md) appenders. For the property value, `categoryName` is probably the most useful - although you could use `pid` or `level`. If the property is not found then the appender will look for the value in the context map. If that fails, then the logger will not output the logging event, without an error. This is to allow for dynamic properties which may not exist for all log messages.
## Example (split on category)
```javascript
log4js.configure({
appenders: {
multi: { type: 'multiFile', base: 'logs/', property: 'categoryName', extension: '.log' }
},
categories: {
default: { appenders: [ 'multi' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in logs/default.log');
const otherLogger = log4js.getLogger('cheese');
otherLogger.info('Cheese is cheddar - this will be logged in logs/cheese.log');
```
This example will result in two log files (`logs/default.log` and `logs/cheese.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: 'multiFile', base: 'logs/', property: 'userID', extension: '.log',
maxLogSize: 10485760, backups: 3, compress: true
}
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug'}
}
});
const userLogger = log4js.getLogger('user');
userLogger.addContext('userID', user.getID());
userLogger.info('this user just logged in');
```
This will result in one log file (`logs/u12345.log`), capped at 10Mb in size, with three backups kept when rolling the file. If more users were logged, each user would get their own files, and their own backups.
| # MultiFile Appender
The multiFile appender can be used to dynamically write logs to multiple files, based on a property of the logging event. Use this as a way to write separate log files for each category when the number of categories is unknown, for instance. It creates [file](file.md) appenders under the hood, so all the options that apply to that appender (apart from filename) can be used with this one, allowing the log files to be rotated and capped at a certain size.
## Configuration
* `type` - `"multiFile"`
* `base` - `string` - the base part of the generated log filename
* `property` - `string` - the value to use to split files (see below).
* `extension` - `string` - the suffix for the generated log filename.
* `timeout` - `integer` - optional activity timeout in ms after which the file will be closed.
All other properties will be passed to the created [file](file.md) appenders. For the property value, `categoryName` is probably the most useful - although you could use `pid` or `level`. If the property is not found then the appender will look for the value in the context map. If that fails, then the logger will not output the logging event, without an error. This is to allow for dynamic properties which may not exist for all log messages.
## Example (split on category)
```javascript
log4js.configure({
appenders: {
multi: { type: 'multiFile', base: 'logs/', property: 'categoryName', extension: '.log' }
},
categories: {
default: { appenders: [ 'multi' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in logs/default.log');
const otherLogger = log4js.getLogger('cheese');
otherLogger.info('Cheese is cheddar - this will be logged in logs/cheese.log');
```
This example will result in two log files (`logs/default.log` and `logs/cheese.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: 'multiFile', base: 'logs/', property: 'userID', extension: '.log',
maxLogSize: 10485760, backups: 3, compress: true
}
},
categories: {
default: { appenders: [ 'everything' ], level: 'debug'}
}
});
const userLogger = log4js.getLogger('user');
userLogger.addContext('userID', user.getID());
userLogger.info('this user just logged in');
```
This will result in one log file (`logs/u12345.log`), capped at 10Mb in size, with three backups kept when rolling the file. If more users were logged, each user would get their own files, and their own backups.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./README.md | log4js-node [](https://github.com/log4js-node/log4js-node/actions/workflows/codeql-analysis.yml) [](https://github.com/log4js-node/log4js-node/actions/workflows/node.js.yml)
===========
[](https://nodei.co/npm/log4js/)
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I started out just stripping out the browser-specific code and tidying up some of the javascript to work better in node. It grew from there. Although it's got a similar name to the Java library [log4j](https://logging.apache.org/log4j/2.x/), thinking that it will behave the same way will only bring you sorrow and confusion.
The full documentation is available [here](https://log4js-node.github.io/log4js-node/).
[Changes in version 3.x](https://log4js-node.github.io/log4js-node/v3-changes.md)
There have been a few changes between log4js 1.x and 2.x (and 0.x too). You should probably read this [migration guide](https://log4js-node.github.io/log4js-node/migration-guide.html) if things aren't working.
Out of the box it supports the following features:
- coloured console logging to stdout or stderr
- file appender, with configurable log rolling based on file size or date
- a logger for connect/express servers
- configurable log message layout/patterns
- different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
Optional appenders are available:
- [SMTP](https://github.com/log4js-node/smtp)
- [GELF](https://github.com/log4js-node/gelf)
- [Loggly](https://github.com/log4js-node/loggly)
- Logstash ([UDP](https://github.com/log4js-node/logstashUDP) and [HTTP](https://github.com/log4js-node/logstashHTTP))
- logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP))
- [RabbitMQ](https://github.com/log4js-node/rabbitmq)
- [Redis](https://github.com/log4js-node/redis)
- [Hipchat](https://github.com/log4js-node/hipchat)
- [Slack](https://github.com/log4js-node/slack)
- [mailgun](https://github.com/log4js-node/mailgun)
- [InfluxDB](https://github.com/rnd-debug/log4js-influxdb-appender)
## Getting help
Having problems? Jump on the [slack](https://join.slack.com/t/log4js-node/shared_invite/enQtODkzMDQ3MzExMDczLWUzZmY0MmI0YWI1ZjFhODY0YjI0YmU1N2U5ZTRkOTYyYzg3MjY5NWI4M2FjZThjYjdiOGM0NjU2NzBmYTJjOGI) channel, or create an issue. If you want to help out with the development, the slack channel is a good place to go as well.
## installation
```bash
npm install log4js
```
## usage
Minimalist version:
```javascript
var log4js = require("log4js");
var logger = log4js.getLogger();
logger.level = "debug";
logger.debug("Some debug messages");
```
By default, log4js will not output any logs (so that it can safely be used in libraries). The `level` for the `default` category is set to `OFF`. To enable logs, set the level (as in the example). This will then output to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:
```bash
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
```
See example.js for a full example, but here's a snippet (also in `examples/fromreadme.js`):
```javascript
const log4js = require("log4js");
log4js.configure({
appenders: { cheese: { type: "file", filename: "cheese.log" } },
categories: { default: { appenders: ["cheese"], level: "error" } }
});
const logger = log4js.getLogger("cheese");
logger.trace("Entering cheese testing");
logger.debug("Got cheese.");
logger.info("Cheese is Comté.");
logger.warn("Cheese is quite smelly.");
logger.error("Cheese is too ripe!");
logger.fatal("Cheese was breeding ground for listeria.");
```
Output (in `cheese.log`):
```bash
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
```
## Note for library makers
If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api).
## Documentation
Available [here](https://log4js-node.github.io/log4js-node/).
There's also [an example application](https://github.com/log4js-node/log4js-example).
## TypeScript
```ts
import log4js from "log4js";
log4js.configure({
appenders: { cheese: { type: "file", filename: "cheese.log" } },
categories: { default: { appenders: ["cheese"], level: "error" } }
});
const logger = log4js.getLogger();
logger.level = "debug";
logger.debug("Some debug messages");
```
## Contributing
We're always looking for people to help out. Jump on [slack](https://join.slack.com/t/log4js-node/shared_invite/enQtODkzMDQ3MzExMDczLWUzZmY0MmI0YWI1ZjFhODY0YjI0YmU1N2U5ZTRkOTYyYzg3MjY5NWI4M2FjZThjYjdiOGM0NjU2NzBmYTJjOGI) and discuss what you want to do. Also, take a look at the [rules](https://log4js-node.github.io/log4js-node/contrib-guidelines.html) before submitting a pull request.
## License
The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
keep the original copyright and author credits in place, except in sections that I have rewritten
extensively.
| log4js-node [](https://github.com/log4js-node/log4js-node/actions/workflows/codeql-analysis.yml) [](https://github.com/log4js-node/log4js-node/actions/workflows/node.js.yml)
===========
[](https://nodei.co/npm/log4js/)
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I started out just stripping out the browser-specific code and tidying up some of the javascript to work better in node. It grew from there. Although it's got a similar name to the Java library [log4j](https://logging.apache.org/log4j/2.x/), thinking that it will behave the same way will only bring you sorrow and confusion.
The full documentation is available [here](https://log4js-node.github.io/log4js-node/).
[Changes in version 3.x](https://log4js-node.github.io/log4js-node/v3-changes.md)
There have been a few changes between log4js 1.x and 2.x (and 0.x too). You should probably read this [migration guide](https://log4js-node.github.io/log4js-node/migration-guide.html) if things aren't working.
Out of the box it supports the following features:
- coloured console logging to stdout or stderr
- file appender, with configurable log rolling based on file size or date
- a logger for connect/express servers
- configurable log message layout/patterns
- different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
Optional appenders are available:
- [SMTP](https://github.com/log4js-node/smtp)
- [GELF](https://github.com/log4js-node/gelf)
- [Loggly](https://github.com/log4js-node/loggly)
- Logstash ([UDP](https://github.com/log4js-node/logstashUDP) and [HTTP](https://github.com/log4js-node/logstashHTTP))
- logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP))
- [RabbitMQ](https://github.com/log4js-node/rabbitmq)
- [Redis](https://github.com/log4js-node/redis)
- [Hipchat](https://github.com/log4js-node/hipchat)
- [Slack](https://github.com/log4js-node/slack)
- [mailgun](https://github.com/log4js-node/mailgun)
- [InfluxDB](https://github.com/rnd-debug/log4js-influxdb-appender)
## Getting help
Having problems? Jump on the [slack](https://join.slack.com/t/log4js-node/shared_invite/enQtODkzMDQ3MzExMDczLWUzZmY0MmI0YWI1ZjFhODY0YjI0YmU1N2U5ZTRkOTYyYzg3MjY5NWI4M2FjZThjYjdiOGM0NjU2NzBmYTJjOGI) channel, or create an issue. If you want to help out with the development, the slack channel is a good place to go as well.
## installation
```bash
npm install log4js
```
## usage
Minimalist version:
```javascript
var log4js = require("log4js");
var logger = log4js.getLogger();
logger.level = "debug";
logger.debug("Some debug messages");
```
By default, log4js will not output any logs (so that it can safely be used in libraries). The `level` for the `default` category is set to `OFF`. To enable logs, set the level (as in the example). This will then output to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:
```bash
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
```
See example.js for a full example, but here's a snippet (also in `examples/fromreadme.js`):
```javascript
const log4js = require("log4js");
log4js.configure({
appenders: { cheese: { type: "file", filename: "cheese.log" } },
categories: { default: { appenders: ["cheese"], level: "error" } }
});
const logger = log4js.getLogger("cheese");
logger.trace("Entering cheese testing");
logger.debug("Got cheese.");
logger.info("Cheese is Comté.");
logger.warn("Cheese is quite smelly.");
logger.error("Cheese is too ripe!");
logger.fatal("Cheese was breeding ground for listeria.");
```
Output (in `cheese.log`):
```bash
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
```
## Note for library makers
If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api).
## Documentation
Available [here](https://log4js-node.github.io/log4js-node/).
There's also [an example application](https://github.com/log4js-node/log4js-example).
## TypeScript
```ts
import log4js from "log4js";
log4js.configure({
appenders: { cheese: { type: "file", filename: "cheese.log" } },
categories: { default: { appenders: ["cheese"], level: "error" } }
});
const logger = log4js.getLogger();
logger.level = "debug";
logger.debug("Some debug messages");
```
## Contributing
We're always looking for people to help out. Jump on [slack](https://join.slack.com/t/log4js-node/shared_invite/enQtODkzMDQ3MzExMDczLWUzZmY0MmI0YWI1ZjFhODY0YjI0YmU1N2U5ZTRkOTYyYzg3MjY5NWI4M2FjZThjYjdiOGM0NjU2NzBmYTJjOGI) and discuss what you want to do. Also, take a look at the [rules](https://log4js-node.github.io/log4js-node/contrib-guidelines.html) before submitting a pull request.
## License
The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
keep the original copyright and author credits in place, except in sections that I have rewritten
extensively.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./v2-changes.md | CHANGES
=======
- no exit listeners defined for appenders by default. users should call log4js.shutdown in their exit listeners.
- context added to loggers (only logstash uses it so far)
- logstash split into two appenders (udp and http)
- no cwd, reload options in config
- configure only by calling configure, no manual adding of appenders, etc
- config format changed a lot, now need to define named appenders and at least one category
- appender format changed, will break any non-core appenders (maybe create adapter?)
- no replacement of console functions
| CHANGES
=======
- no exit listeners defined for appenders by default. users should call log4js.shutdown in their exit listeners.
- context added to loggers (only logstash uses it so far)
- logstash split into two appenders (udp and http)
- no cwd, reload options in config
- configure only by calling configure, no manual adding of appenders, etc
- config format changed a lot, now need to define named appenders and at least one category
- appender format changed, will break any non-core appenders (maybe create adapter?)
- no replacement of console functions
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/migration-guide.md | # Migrating from log4js versions older than 2.x
## Configuration
If you try to use your v1 configuration with v2 code, you'll most likely get an error that says something like 'must have property "appenders" of type object'. The format of the configuration object has changed (see the [api](api.md) docs for details). The main changes are a need for you to name your appenders, and you also have to define the default category. For example, if your v1 config looked like this:
```javascript
{ appenders: [
{ type: 'console' },
{
type: 'dateFile',
filename: 'logs/task',
pattern:"-dd.log",
alwaysIncludePattern: true,
category: 'task'
}
] }
```
Then your v2 config should be something like this:
```javascript
{
appenders: {
out: { type: 'console' },
task: {
type: 'dateFile',
filename: 'logs/task',
pattern: '-dd.log',
alwaysIncludePattern: true
}
},
categories: {
default: { appenders: [ 'out' ], level: 'info' },
task: { appenders: [ 'task' ], level: 'info' }
}
}}
```
The functions to define the configuration programmatically have been remove (`addAppender`, `loadAppender`, etc). All configuration should now be done through the single `configure` function, passing in a filename or object.
## Console replacement
V1 used to allow you to replace the node.js console functions with versions that would log to a log4js appender. This used to cause some weird errors, so I decided it was better to remove it from the log4js core functionality. If you still want to do this, you can replicate the behaviour with code similar to this:
```javascript
log4js.configure(...); // set up your categories and appenders
const logger = log4js.getLogger('console'); // any category will work
console.log = logger.info.bind(logger); // do the same for others - console.debug, etc.
```
## Config Reloading
Previous versions of log4js used to watch for changes in the configuration file and reload when it changed. It didn't always work well, sometimes leaving file handles or sockets open. This feature was removed in version 2.x. As a replacement, I'd suggest using a library like [watchr](https://www.npmjs.com/package/watchr) to notify you of file changes. Then you can call `log4js.shutdown` followed by `log4js.configure` again.
## Appenders
If you have written your own custom appenders, they will not work without modification in v2. See the guide to [writing appenders](writing-appenders.md) for details on how appenders work in 2.x. Note that if you want to write your appender to work with both 1.x and 2.x, then you can tell what version you're running in by examining the number of arguments passed to the `configure` function of your appender: 2 arguments means v1, 4 arguments means v2.
All the core appenders have been upgraded to work with v2, except for the clustered appender which has been removed. The core log4js code handles cluster mode transparently.
The `logFaces` appender was split into two versions to make testing easier and the code simpler; one has HTTP support, the other UDP.
## Exit listeners
Some appenders used to define their own `exit` listeners, and it was never clear whose responsibility it was to clean up resources. Now log4js does not define any `exit` listeners. Instead your application should register an `exit` listener, and call `log4js.shutdown` to be sure that all log messages get written before your application terminates.
## New Features
* MDC contexts - you can now add key-value pairs to a logger (for grouping all log messages from a particular user, for example). Support for these values exists in the [pattern layout](layouts.md), the logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender, and the [multi-file appender](multiFile.md).
* Automatic cluster support - log4js now handles clusters transparently
* Custom levels - you can define your own log levels in the configuration object, including the colours
* Improved performance - several changes have been made to improve performance, especially for the file appenders.
| # Migrating from log4js versions older than 2.x
## Configuration
If you try to use your v1 configuration with v2 code, you'll most likely get an error that says something like 'must have property "appenders" of type object'. The format of the configuration object has changed (see the [api](api.md) docs for details). The main changes are a need for you to name your appenders, and you also have to define the default category. For example, if your v1 config looked like this:
```javascript
{ appenders: [
{ type: 'console' },
{
type: 'dateFile',
filename: 'logs/task',
pattern:"-dd.log",
alwaysIncludePattern: true,
category: 'task'
}
] }
```
Then your v2 config should be something like this:
```javascript
{
appenders: {
out: { type: 'console' },
task: {
type: 'dateFile',
filename: 'logs/task',
pattern: '-dd.log',
alwaysIncludePattern: true
}
},
categories: {
default: { appenders: [ 'out' ], level: 'info' },
task: { appenders: [ 'task' ], level: 'info' }
}
}}
```
The functions to define the configuration programmatically have been remove (`addAppender`, `loadAppender`, etc). All configuration should now be done through the single `configure` function, passing in a filename or object.
## Console replacement
V1 used to allow you to replace the node.js console functions with versions that would log to a log4js appender. This used to cause some weird errors, so I decided it was better to remove it from the log4js core functionality. If you still want to do this, you can replicate the behaviour with code similar to this:
```javascript
log4js.configure(...); // set up your categories and appenders
const logger = log4js.getLogger('console'); // any category will work
console.log = logger.info.bind(logger); // do the same for others - console.debug, etc.
```
## Config Reloading
Previous versions of log4js used to watch for changes in the configuration file and reload when it changed. It didn't always work well, sometimes leaving file handles or sockets open. This feature was removed in version 2.x. As a replacement, I'd suggest using a library like [watchr](https://www.npmjs.com/package/watchr) to notify you of file changes. Then you can call `log4js.shutdown` followed by `log4js.configure` again.
## Appenders
If you have written your own custom appenders, they will not work without modification in v2. See the guide to [writing appenders](writing-appenders.md) for details on how appenders work in 2.x. Note that if you want to write your appender to work with both 1.x and 2.x, then you can tell what version you're running in by examining the number of arguments passed to the `configure` function of your appender: 2 arguments means v1, 4 arguments means v2.
All the core appenders have been upgraded to work with v2, except for the clustered appender which has been removed. The core log4js code handles cluster mode transparently.
The `logFaces` appender was split into two versions to make testing easier and the code simpler; one has HTTP support, the other UDP.
## Exit listeners
Some appenders used to define their own `exit` listeners, and it was never clear whose responsibility it was to clean up resources. Now log4js does not define any `exit` listeners. Instead your application should register an `exit` listener, and call `log4js.shutdown` to be sure that all log messages get written before your application terminates.
## New Features
* MDC contexts - you can now add key-value pairs to a logger (for grouping all log messages from a particular user, for example). Support for these values exists in the [pattern layout](layouts.md), the logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender, and the [multi-file appender](multiFile.md).
* Automatic cluster support - log4js now handles clusters transparently
* Custom levels - you can define your own log levels in the configuration object, including the colours
* Improved performance - several changes have been made to improve performance, especially for the file appenders.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/stdout.md | # Standard Output Appender
This appender writes all log events to the standard output stream. It is the default appender for log4js.
# Configuration
* `type` - `stdout`
* `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
# Example
```javascript
log4js.configure({
appenders: { 'out': { type: 'stdout' } },
categories: { default: { appenders: ['out'], level: 'info' } }
});
```
| # Standard Output Appender
This appender writes all log events to the standard output stream. It is the default appender for log4js.
# Configuration
* `type` - `stdout`
* `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
# Example
```javascript
log4js.configure({
appenders: { 'out': { type: 'stdout' } },
categories: { default: { appenders: ['out'], level: 'info' } }
});
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/clustering.md | # Clustering / Multi-process Logging
If you're running log4js in an application that uses [node's core cluster](https://nodejs.org/dist/latest-v8.x/docs/api/cluster.html) then log4js will transparently handle making sure the processes don't try to log at the same time. All logging is done on the master process, with the worker processes sending their log messages to the master via `process.send`. This ensures that you don't get multiple processes trying to write to the same file (or rotate the log files) at the same time.
This can cause problems in some rare circumstances, if you're experiencing weird logging problems, then use the `disableClustering: true` option in your log4js configuration to have every process behave as if it were the master process. Be careful if you're logging to files.
## I'm using PM2, but I'm not getting any logs!
To get log4js working with [PM2](http://pm2.keymetrics.io), you'll need to install the [pm2-intercom](https://www.npmjs.com/package/pm2-intercom) module.
```bash
pm2 install pm2-intercom
```
Then add the value `pm2: true` to your log4js configuration. If you're also using `node-config`, then you'll probably have renamed your `NODE_APP_INSTANCE` environment variable. If so, you'll also need to add `pm2InstanceVar: '<NEW_APP_INSTANCE_ID>'` where `<NEW_APP_INSTANCE_ID>` should be replaced with the new name you gave the instance environment variable.
```javascript
log4js.configure({
appenders: { out: { type: 'stdout'}},
categories: { default: { appenders: ['out'], level: 'info'}},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
```
## I'm using Passenger, but I'm not getting any logs!
[Passenger](https://www.phusionpassenger.com/library/) replaces the node.js core cluster module with a non-functional stub, so you won't see any output using log4js. To fix this, add `disableClustering: true` to your configuration. Again, be careful if you're logging to files.
## I'm not using clustering/pm2/passenger but I do have multiple processes that I'd like to all log to the same place
Ok, you probably want to look at the [tcp-server](tcp-server.md) and [tcp appender](tcp.md) documentation.
| # Clustering / Multi-process Logging
If you're running log4js in an application that uses [node's core cluster](https://nodejs.org/dist/latest-v8.x/docs/api/cluster.html) then log4js will transparently handle making sure the processes don't try to log at the same time. All logging is done on the master process, with the worker processes sending their log messages to the master via `process.send`. This ensures that you don't get multiple processes trying to write to the same file (or rotate the log files) at the same time.
This can cause problems in some rare circumstances, if you're experiencing weird logging problems, then use the `disableClustering: true` option in your log4js configuration to have every process behave as if it were the master process. Be careful if you're logging to files.
## I'm using PM2, but I'm not getting any logs!
To get log4js working with [PM2](http://pm2.keymetrics.io), you'll need to install the [pm2-intercom](https://www.npmjs.com/package/pm2-intercom) module.
```bash
pm2 install pm2-intercom
```
Then add the value `pm2: true` to your log4js configuration. If you're also using `node-config`, then you'll probably have renamed your `NODE_APP_INSTANCE` environment variable. If so, you'll also need to add `pm2InstanceVar: '<NEW_APP_INSTANCE_ID>'` where `<NEW_APP_INSTANCE_ID>` should be replaced with the new name you gave the instance environment variable.
```javascript
log4js.configure({
appenders: { out: { type: 'stdout'}},
categories: { default: { appenders: ['out'], level: 'info'}},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
```
## I'm using Passenger, but I'm not getting any logs!
[Passenger](https://www.phusionpassenger.com/library/) replaces the node.js core cluster module with a non-functional stub, so you won't see any output using log4js. To fix this, add `disableClustering: true` to your configuration. Again, be careful if you're logging to files.
## I'm not using clustering/pm2/passenger but I do have multiple processes that I'd like to all log to the same place
Ok, you probably want to look at the [tcp-server](tcp-server.md) and [tcp appender](tcp.md) documentation.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/recording.md | # Recording Appender
This appender stores the log events in memory. It is mainly useful for testing (see the tests for the category filter, for instance).
## Configuration
* `type` - `recording`
There is no other configuration for this appender.
## Usage
The array that stores log events is shared across all recording appender instances, and is accessible from the recording module. `require('<LOG4JS LIB DIR>/appenders/recording')` returns a module with the following functions exported:
* `replay` - returns `Array<LogEvent>` - get all the events recorded.
* `playback` - synonym for `replay`
* `reset` - clears the array of events recorded.
* `erase` - synonyms for `reset`
## Example
```javascript
const recording = require('log4js/lib/appenders/recording');
const log4js = require('log4js');
log4js.configure({
appenders: { vcr: { type: 'recording' } },
categories: { default: { appenders: ['vcr'], level: 'info' } }
});
const logger = log4js.getLogger();
logger.info("some log event");
const events = recording.replay(); // events is an array of LogEvent objects.
recording.erase(); // clear the appender's array.
```
| # Recording Appender
This appender stores the log events in memory. It is mainly useful for testing (see the tests for the category filter, for instance).
## Configuration
* `type` - `recording`
There is no other configuration for this appender.
## Usage
The array that stores log events is shared across all recording appender instances, and is accessible from the recording module. `require('<LOG4JS LIB DIR>/appenders/recording')` returns a module with the following functions exported:
* `replay` - returns `Array<LogEvent>` - get all the events recorded.
* `playback` - synonym for `replay`
* `reset` - clears the array of events recorded.
* `erase` - synonyms for `reset`
## Example
```javascript
const recording = require('log4js/lib/appenders/recording');
const log4js = require('log4js');
log4js.configure({
appenders: { vcr: { type: 'recording' } },
categories: { default: { appenders: ['vcr'], level: 'info' } }
});
const logger = log4js.getLogger();
logger.info("some log event");
const events = recording.replay(); // events is an array of LogEvent objects.
recording.erase(); // clear the appender's array.
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/tcp.md | # TCP Appender
The TCP appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. It's designed to work with the [tcp-server](tcp-server.md), but it doesn't necessarily have to, just make sure whatever is listening at the other end is expecting JSON objects as strings.
## Configuration
* `type` - `tcp`
* `port` - `integer` (optional, defaults to `5000`) - the port to send to
* `host` - `string` (optional, defaults to `localhost`) - the host/IP address to send to
* `endMsg` - `string` (optional, defaults to `__LOG4JS__`) - the delimiter that marks the end of a log message
* `layout` - `object` (optional, defaults to a serialized log event) - see [layouts](layouts.md)
## Example
```javascript
log4js.configure({
appenders: {
network: { type: 'tcp', host: 'log.server' }
},
categories: {
default: { appenders: ['network'], level: 'error' }
}
});
```
This will send all error messages to `log.server:5000`.
| # TCP Appender
The TCP appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. It's designed to work with the [tcp-server](tcp-server.md), but it doesn't necessarily have to, just make sure whatever is listening at the other end is expecting JSON objects as strings.
## Configuration
* `type` - `tcp`
* `port` - `integer` (optional, defaults to `5000`) - the port to send to
* `host` - `string` (optional, defaults to `localhost`) - the host/IP address to send to
* `endMsg` - `string` (optional, defaults to `__LOG4JS__`) - the delimiter that marks the end of a log message
* `layout` - `object` (optional, defaults to a serialized log event) - see [layouts](layouts.md)
## Example
```javascript
log4js.configure({
appenders: {
network: { type: 'tcp', host: 'log.server' }
},
categories: {
default: { appenders: ['network'], level: 'error' }
}
});
```
This will send all error messages to `log.server:5000`.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/noLogFilter.md | # Category Filter
The no log filter allows you to exclude the log events that an appender will record.
The log events will be excluded depending on the regular expressions provided in the configuration.
This can be useful when you debug your application and you want to exclude some noisily logs that are irrelevant to your investigation.
You can stop to log them through a regular expression.
## Configuration
* `type` - `"noLogFilter"`
* `exclude` - `string | Array<string>` - the regular expression (or the regular expressions if you provide an array of values) will be used for evaluating the events to pass to the appender. The events, which will match the regular expression, will be excluded and so not logged.
* `appender` - `string` - the name of an appender, defined in the same configuration, that you want to filter.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
filtered: {
type: 'noLogFilter',
exclude: 'not',
appender: 'everything' }
},
categories: {
default: { appenders: [ 'filtered' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
logger.debug('I will be not logged in all-the-logs.log');
```
Note that:
* an array of strings can be specified in the configuration
* a case insensitive match will be done
* empty strings will be not considered and so removed from the array of values
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
filtered: {
type: 'noLogFilter',
exclude: ['NOT', '\\d', ''],
appender: 'everything' }
},
categories: {
default: { appenders: [ 'filtered' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
logger.debug('I will be not logged in all-the-logs.log');
logger.debug('A 2nd message that will be excluded in all-the-logs.log');
``` | # Category Filter
The no log filter allows you to exclude the log events that an appender will record.
The log events will be excluded depending on the regular expressions provided in the configuration.
This can be useful when you debug your application and you want to exclude some noisily logs that are irrelevant to your investigation.
You can stop to log them through a regular expression.
## Configuration
* `type` - `"noLogFilter"`
* `exclude` - `string | Array<string>` - the regular expression (or the regular expressions if you provide an array of values) will be used for evaluating the events to pass to the appender. The events, which will match the regular expression, will be excluded and so not logged.
* `appender` - `string` - the name of an appender, defined in the same configuration, that you want to filter.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
filtered: {
type: 'noLogFilter',
exclude: 'not',
appender: 'everything' }
},
categories: {
default: { appenders: [ 'filtered' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
logger.debug('I will be not logged in all-the-logs.log');
```
Note that:
* an array of strings can be specified in the configuration
* a case insensitive match will be done
* empty strings will be not considered and so removed from the array of values
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
filtered: {
type: 'noLogFilter',
exclude: ['NOT', '\\d', ''],
appender: 'everything' }
},
categories: {
default: { appenders: [ 'filtered' ], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
logger.debug('I will be not logged in all-the-logs.log');
logger.debug('A 2nd message that will be excluded in all-the-logs.log');
``` | -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./SECURITY.md | # Security Policy
## Supported Versions
We're aiming to only support the latest major version of log4js. Older than that is usually *very* old.
| Version | Supported |
| ------- | ------------------ |
| 6.x | :white_check_mark: |
| < 6.0 | :x: |
## Reporting a Vulnerability
Report vulnerabilities via email to:
* Gareth Jones <[email protected]>
* Lam Wei Li <[email protected]>
Please put "[log4js:security]" in the subject line. We will aim to respond within a day or two.
| # Security Policy
## Supported Versions
We're aiming to only support the latest major version of log4js. Older than that is usually *very* old.
| Version | Supported |
| ------- | ------------------ |
| 6.x | :white_check_mark: |
| < 6.0 | :x: |
## Reporting a Vulnerability
Report vulnerabilities via email to:
* Gareth Jones <[email protected]>
* Lam Wei Li <[email protected]>
Please put "[log4js:security]" in the subject line. We will aim to respond within a day or two.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./types/test.ts | import * as log4js from './log4js';
log4js.configure('./filename');
const logger1 = log4js.getLogger();
logger1.level = 'debug';
logger1.debug("Some debug messages");
logger1.fatal({
whatever: 'foo'
})
const logger3 = log4js.getLogger('cheese');
logger3.trace('Entering cheese testing');
logger3.debug('Got cheese.');
logger3.info('Cheese is Gouda.');
logger3.warn('Cheese is quite smelly.');
logger3.error('Cheese is too ripe!');
logger3.fatal('Cheese was breeding ground for listeria.');
log4js.configure({
appenders: { cheese: { type: 'console', filename: 'cheese.log' } },
categories: { default: { appenders: ['cheese'], level: 'error' } }
});
log4js.configure({
appenders: {
out: { type: 'file', filename: 'pm2logs.log' }
},
categories: {
default: { appenders: ['out'], level: 'info' }
},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
log4js.addLayout('json', config => function (logEvent) {
return JSON.stringify(logEvent) + config.separator;
});
log4js.configure({
appenders: {
out: { type: 'stdout', layout: { type: 'json', separator: ',' } }
},
categories: {
default: { appenders: ['out'], level: 'info' }
}
});
log4js.configure({
appenders: {
file: { type: 'dateFile', filename: 'thing.log', pattern: '.mm' }
},
categories: {
default: { appenders: ['file'], level: 'debug' }
}
});
const logger4 = log4js.getLogger('thing');
logger4.log('logging a thing');
const logger5 = log4js.getLogger('json-test');
logger5.info('this is just a test');
logger5.error('of a custom appender');
logger5.warn('that outputs json');
log4js.shutdown();
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' }
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' }
}
});
const logger6 = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', { some: 'otherObject', useful_for: 'debug purposes' });
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger6.trace('Entering cheese testing');
logger6.debug('Got cheese.');
logger6.info('Cheese is Gouda.');
logger6.log('Something funny about cheese.');
logger6.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger6.error('Cheese %s is too ripe!', 'gouda');
logger6.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
import { configure, getLogger } from './log4js';
configure('./filename');
const logger2 = getLogger();
logger2.level = 'debug';
logger2.debug("Some debug messages");
configure({
appenders: { cheese: { type: 'file', filename: 'cheese.log' } },
categories: { default: { appenders: ['cheese'], level: 'error' } }
});
log4js.configure('./filename').getLogger();
const logger7 = log4js.getLogger();
logger7.level = 'debug';
logger7.debug("Some debug messages");
const levels: log4js.Levels = log4js.levels;
const level: log4js.Level = levels.getLevel('info');
log4js.connectLogger(logger1, {
format: ':x, :y',
level: 'info',
context: true
});
log4js.connectLogger(logger2, {
format: (req, _res, format) => format(`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`)
});
//support for passing in an appender module
log4js.configure({
appenders: { thing: { type: { configure: () => () => {} }}},
categories: { default: { appenders: ['thing'], level: 'debug'}}
});
log4js.configure({
appenders: { rec: { type: 'recording' } },
categories: { default: { appenders: ['rec'], 'level': 'debug' } }
});
const logger8 = log4js.getLogger();
logger8.level = 'debug'
logger8.debug('This will go to the recording!')
logger8.debug('Another one')
const recording = log4js.recording()
const loggingEvents = recording.playback()
if (loggingEvents.length !== 2) {
throw new Error(`Expected 2 recorded events, got ${loggingEvents.length}`)
}
if (loggingEvents[0].data[0] !== 'This will go to the recording!') {
throw new Error(`Expected message 'This will go to the recording!', got ${loggingEvents[0].data[0]}`)
}
if (loggingEvents[1].data[0] !== 'Another one') {
throw new Error(`Expected message 'Another one', got ${loggingEvents[1].data[0]}`)
}
recording.reset()
const loggingEventsPostReset = recording.playback()
if (loggingEventsPostReset.length !== 0) {
throw new Error(`Expected 0 recorded events after reset, got ${loggingEventsPostReset.length}`)
}
| import * as log4js from './log4js';
log4js.configure('./filename');
const logger1 = log4js.getLogger();
logger1.level = 'debug';
logger1.debug("Some debug messages");
logger1.fatal({
whatever: 'foo'
})
const logger3 = log4js.getLogger('cheese');
logger3.trace('Entering cheese testing');
logger3.debug('Got cheese.');
logger3.info('Cheese is Gouda.');
logger3.warn('Cheese is quite smelly.');
logger3.error('Cheese is too ripe!');
logger3.fatal('Cheese was breeding ground for listeria.');
log4js.configure({
appenders: { cheese: { type: 'console', filename: 'cheese.log' } },
categories: { default: { appenders: ['cheese'], level: 'error' } }
});
log4js.configure({
appenders: {
out: { type: 'file', filename: 'pm2logs.log' }
},
categories: {
default: { appenders: ['out'], level: 'info' }
},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
log4js.addLayout('json', config => function (logEvent) {
return JSON.stringify(logEvent) + config.separator;
});
log4js.configure({
appenders: {
out: { type: 'stdout', layout: { type: 'json', separator: ',' } }
},
categories: {
default: { appenders: ['out'], level: 'info' }
}
});
log4js.configure({
appenders: {
file: { type: 'dateFile', filename: 'thing.log', pattern: '.mm' }
},
categories: {
default: { appenders: ['file'], level: 'debug' }
}
});
const logger4 = log4js.getLogger('thing');
logger4.log('logging a thing');
const logger5 = log4js.getLogger('json-test');
logger5.info('this is just a test');
logger5.error('of a custom appender');
logger5.warn('that outputs json');
log4js.shutdown();
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' }
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' }
}
});
const logger6 = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', { some: 'otherObject', useful_for: 'debug purposes' });
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger6.trace('Entering cheese testing');
logger6.debug('Got cheese.');
logger6.info('Cheese is Gouda.');
logger6.log('Something funny about cheese.');
logger6.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger6.error('Cheese %s is too ripe!', 'gouda');
logger6.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
import { configure, getLogger } from './log4js';
configure('./filename');
const logger2 = getLogger();
logger2.level = 'debug';
logger2.debug("Some debug messages");
configure({
appenders: { cheese: { type: 'file', filename: 'cheese.log' } },
categories: { default: { appenders: ['cheese'], level: 'error' } }
});
log4js.configure('./filename').getLogger();
const logger7 = log4js.getLogger();
logger7.level = 'debug';
logger7.debug("Some debug messages");
const levels: log4js.Levels = log4js.levels;
const level: log4js.Level = levels.getLevel('info');
log4js.connectLogger(logger1, {
format: ':x, :y',
level: 'info',
context: true
});
log4js.connectLogger(logger2, {
format: (req, _res, format) => format(`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`)
});
//support for passing in an appender module
log4js.configure({
appenders: { thing: { type: { configure: () => () => {} }}},
categories: { default: { appenders: ['thing'], level: 'debug'}}
});
log4js.configure({
appenders: { rec: { type: 'recording' } },
categories: { default: { appenders: ['rec'], 'level': 'debug' } }
});
const logger8 = log4js.getLogger();
logger8.level = 'debug'
logger8.debug('This will go to the recording!')
logger8.debug('Another one')
const recording = log4js.recording()
const loggingEvents = recording.playback()
if (loggingEvents.length !== 2) {
throw new Error(`Expected 2 recorded events, got ${loggingEvents.length}`)
}
if (loggingEvents[0].data[0] !== 'This will go to the recording!') {
throw new Error(`Expected message 'This will go to the recording!', got ${loggingEvents[0].data[0]}`)
}
if (loggingEvents[1].data[0] !== 'Another one') {
throw new Error(`Expected message 'Another one', got ${loggingEvents[1].data[0]}`)
}
recording.reset()
const loggingEventsPostReset = recording.playback()
if (loggingEventsPostReset.length !== 0) {
throw new Error(`Expected 0 recorded events after reset, got ${loggingEventsPostReset.length}`)
}
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/appenders.md | # Log4js - Appenders
Appenders serialise log events to some form of output. They can write to files, send emails, send data over the network. All appenders have a `type` which determines which appender gets used. For example:
```javascript
const log4js = require('log4js');
log4js.configure({
appenders: {
out: { type: 'stdout' },
app: { type: 'file', filename: 'application.log' }
},
categories: {
default: { appenders: [ 'out', 'app' ], level: 'debug' }
}
});
```
This defines two appenders named 'out' and 'app'. 'out' uses the [stdout](stdout.md) appender which writes to standard out. 'app' uses the [file](file.md) appender, configured to write to 'application.log'.
## Core Appenders
The following appenders are included with log4js. Some require extra dependencies that are not included as part of log4js (the [smtp](https://github.com/log4js-node/smtp) appender needs [nodemailer](https://www.npmjs.com/package/nodemailer) for example), and these will be noted in the docs for that appender. If you don't use those appenders, then you don't need the extra dependencies.
* [categoryFilter](categoryFilter.md)
* [console](console.md)
* [dateFile](dateFile.md)
* [file](file.md)
* [fileSync](fileSync.md)
* [logLevelFilter](logLevelFilter.md)
* [multiFile](multiFile.md)
* [multiprocess](multiprocess.md)
* [noLogFilter](noLogFilter.md)
* [recording](recording.md)
* [stderr](stderr.md)
* [stdout](stdout.md)
* [tcp](tcp.md)
* [tcp-server](tcp-server.md)
## Optional Appenders
The following appenders are supported by log4js, but are no longer distributed with log4js core from version 3 onwards.
* [gelf](https://github.com/log4js-node/gelf)
* [hipchat](https://github.com/log4js-node/hipchat)
* [logFaces-HTTP](https://github.com/log4js-node/logFaces-HTTP)
* [logFaces-UDP](https://github.com/log4js-node/logFaces-UDP)
* [loggly](https://github.com/log4js-node/loggly)
* [logstashHTTP](https://github.com/log4js-node/logstashHTTP)
* [logstashUDP](https://github.com/log4js-node/logstashUDP)
* [mailgun](https://github.com/log4js-node/mailgun)
* [rabbitmq](https://github.com/log4js-node/rabbitmq)
* [redis](https://github.com/log4js-node/redis)
* [slack](https://github.com/log4js-node/slack)
* [smtp](https://github.com/log4js-node/smtp)
For example, if you were previously using the gelf appender (`type: 'gelf'`) then you should add `@log4js-node/gelf` to your dependencies and change the type to `type: '@log4js-node/gelf'`.
## Other Appenders
These appenders are maintained by its own authors and may be useful for you:
* [udp](https://github.com/iassasin/log4js-udp-appender)
## Custom Appenders
Log4js can load appenders from outside the core appenders. The `type` config value is used as a require path if no matching appender can be found. For example, the following configuration will attempt to load an appender from the module 'cheese/appender', passing the rest of the config for the appender to that module:
```javascript
log4js.configure({
appenders: { gouda: { type: 'cheese/appender', flavour: 'tasty' } },
categories: { default: { appenders: ['gouda'], level: 'debug' }}
});
```
Log4js checks the following places (in this order) for appenders based on the type value:
1. The core appenders: `require('./appenders/' + type)`
2. node_modules: `require(type)`
3. relative to the main file of your application: `require(path.dirname(require.main.filename) + '/' + type)`
4. relative to the process' current working directory: `require(process.cwd() + '/' + type)`
If you want to write your own appender, read the [documentation](writing-appenders.md) first.
## Advanced configuration
If you've got a custom appender of your own, or are using webpack (or some other bundler), you may find it easier to pass
in the appender module in the config instead of loading from the node.js require path. Here's an example:
```javascript
const myAppenderModule = {
configure: (config, layouts, findAppender, levels) => { /* ...your appender config... */ }
};
log4js.configure({
appenders: { custom: { type: myAppenderModule } },
categories: { default: { appenders: ['custom'], level: 'debug' } }
});
```
| # Log4js - Appenders
Appenders serialise log events to some form of output. They can write to files, send emails, send data over the network. All appenders have a `type` which determines which appender gets used. For example:
```javascript
const log4js = require('log4js');
log4js.configure({
appenders: {
out: { type: 'stdout' },
app: { type: 'file', filename: 'application.log' }
},
categories: {
default: { appenders: [ 'out', 'app' ], level: 'debug' }
}
});
```
This defines two appenders named 'out' and 'app'. 'out' uses the [stdout](stdout.md) appender which writes to standard out. 'app' uses the [file](file.md) appender, configured to write to 'application.log'.
## Core Appenders
The following appenders are included with log4js. Some require extra dependencies that are not included as part of log4js (the [smtp](https://github.com/log4js-node/smtp) appender needs [nodemailer](https://www.npmjs.com/package/nodemailer) for example), and these will be noted in the docs for that appender. If you don't use those appenders, then you don't need the extra dependencies.
* [categoryFilter](categoryFilter.md)
* [console](console.md)
* [dateFile](dateFile.md)
* [file](file.md)
* [fileSync](fileSync.md)
* [logLevelFilter](logLevelFilter.md)
* [multiFile](multiFile.md)
* [multiprocess](multiprocess.md)
* [noLogFilter](noLogFilter.md)
* [recording](recording.md)
* [stderr](stderr.md)
* [stdout](stdout.md)
* [tcp](tcp.md)
* [tcp-server](tcp-server.md)
## Optional Appenders
The following appenders are supported by log4js, but are no longer distributed with log4js core from version 3 onwards.
* [gelf](https://github.com/log4js-node/gelf)
* [hipchat](https://github.com/log4js-node/hipchat)
* [logFaces-HTTP](https://github.com/log4js-node/logFaces-HTTP)
* [logFaces-UDP](https://github.com/log4js-node/logFaces-UDP)
* [loggly](https://github.com/log4js-node/loggly)
* [logstashHTTP](https://github.com/log4js-node/logstashHTTP)
* [logstashUDP](https://github.com/log4js-node/logstashUDP)
* [mailgun](https://github.com/log4js-node/mailgun)
* [rabbitmq](https://github.com/log4js-node/rabbitmq)
* [redis](https://github.com/log4js-node/redis)
* [slack](https://github.com/log4js-node/slack)
* [smtp](https://github.com/log4js-node/smtp)
For example, if you were previously using the gelf appender (`type: 'gelf'`) then you should add `@log4js-node/gelf` to your dependencies and change the type to `type: '@log4js-node/gelf'`.
## Other Appenders
These appenders are maintained by its own authors and may be useful for you:
* [udp](https://github.com/iassasin/log4js-udp-appender)
## Custom Appenders
Log4js can load appenders from outside the core appenders. The `type` config value is used as a require path if no matching appender can be found. For example, the following configuration will attempt to load an appender from the module 'cheese/appender', passing the rest of the config for the appender to that module:
```javascript
log4js.configure({
appenders: { gouda: { type: 'cheese/appender', flavour: 'tasty' } },
categories: { default: { appenders: ['gouda'], level: 'debug' }}
});
```
Log4js checks the following places (in this order) for appenders based on the type value:
1. The core appenders: `require('./appenders/' + type)`
2. node_modules: `require(type)`
3. relative to the main file of your application: `require(path.dirname(require.main.filename) + '/' + type)`
4. relative to the process' current working directory: `require(process.cwd() + '/' + type)`
If you want to write your own appender, read the [documentation](writing-appenders.md) first.
## Advanced configuration
If you've got a custom appender of your own, or are using webpack (or some other bundler), you may find it easier to pass
in the appender module in the config instead of loading from the node.js require path. Here's an example:
```javascript
const myAppenderModule = {
configure: (config, layouts, findAppender, levels) => { /* ...your appender config... */ }
};
log4js.configure({
appenders: { custom: { type: myAppenderModule } },
categories: { default: { appenders: ['custom'], level: 'debug' } }
});
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/console.md | # Console Appender
This appender uses node's console object to write log events. It can also be used in the browser, if you're using browserify or something similar. Be aware that writing a high volume of output to the console can make your application use a lot of memory. If you experience this problem, try switching to the [stdout](stdout.md) appender.
# Configuration
* `type` - `console`
* `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
Note that all log events are output using `console.log` regardless of the event's level (so `ERROR` events will not be logged using `console.error`).
# Example
```javascript
log4js.configure({
appenders: { console: { type: 'console' } },
categories: { default: { appenders: [ 'console' ], level: 'info' } }
});
```
| # Console Appender
This appender uses node's console object to write log events. It can also be used in the browser, if you're using browserify or something similar. Be aware that writing a high volume of output to the console can make your application use a lot of memory. If you experience this problem, try switching to the [stdout](stdout.md) appender.
# Configuration
* `type` - `console`
* `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
Note that all log events are output using `console.log` regardless of the event's level (so `ERROR` events will not be logged using `console.error`).
# Example
```javascript
log4js.configure({
appenders: { console: { type: 'console' } },
categories: { default: { appenders: [ 'console' ], level: 'info' } }
});
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/api.md | ## API
## configuration - `log4js.configure(object || string)`
There is one entry point for configuring log4js. A string argument is treated as a filename to load configuration from. Config files should be JSON, and contain a configuration object (see format below). You can also pass a configuration object directly to `configure`.
Configuration should take place immediately after requiring log4js for the first time in your application. If you do not call `configure`, log4js will use `LOG4JS_CONFIG` (if defined) or the default config. The default config defines one appender, which would log to stdout with the coloured layout, but also defines the default log level to be `OFF` - which means no logs will be output.
If you are using `cluster`, then include the call to `configure` in the worker processes as well as the master. That way the worker processes will pick up the right levels for your categories, and any custom levels you may have defined. Appenders will only be defined on the master process, so there is no danger of multiple processes attempting to write to the same appender. No special configuration is needed to use log4js with clusters, unlike previous versions.
Configuration objects must define at least one appender, and a default category. Log4js will throw an exception if the configuration is invalid.
`configure` method call returns the configured log4js object.
### Configuration Object
Properties:
* `levels` (optional, object) - used for defining custom log levels, or redefining existing ones; this is a map with the level name as the key (string, case insensitive), and an object as the value. The object should have two properties: the level value (integer) as the value, and the colour. Log levels are used to assign importance to log messages, with the integer value being used to sort them. If you do not specify anything in your configuration, the default values are used (ALL < TRACE < DEBUG < INFO < WARN < ERROR < FATAL < MARK < OFF - note that OFF is intended to be used to turn off logging, not as a level for actual logging, i.e. you would never call `logger.off('some log message')`). Levels defined here are used in addition to the default levels, with the integer value being used to determine their relation to the default levels. If you define a level with the same name as a default level, then the integer value in the config takes precedence. Level names must begin with a letter, and can only contain letters, numbers and underscores.
* `appenders` (object) - a map of named appenders (string) to appender definitions (object); appender definitions must have a property `type` (string) - other properties depend on the appender type.
* `categories` (object) - a map of named categories (string) to category definitions (object). You must define the `default` category which is used for all log events that do not match a specific category. Category definitions have two properties:
* `appenders` (array of strings) - the list of appender names to be used for this category. A category must have at least one appender.
* `level` (string, case insensitive) - the minimum log level that this category will send to the appenders. For example, if set to 'error' then the appenders will only receive log events of level 'error', 'fatal', 'mark' - log events of 'info', 'warn', 'debug', or 'trace' will be ignored.
* `enableCallStack` (boolean, optional, defaults to `false`) - setting this to `true` will make log events for this category use the call stack to generate line numbers and file names in the event. See [pattern layout](layouts.md) for how to output these values in your appenders.
* `pm2` (boolean) (optional) - set this to true if you're running your app using [pm2](http://pm2.keymetrics.io), otherwise logs will not work (you'll also need to install pm2-intercom as pm2 module: `pm2 install pm2-intercom`)
* `pm2InstanceVar` (string) (optional, defaults to 'NODE_APP_INSTANCE') - set this if you're using pm2 and have changed the default name of the NODE_APP_INSTANCE variable.
* `disableClustering` (boolean) (optional) - set this to true if you liked the way log4js used to just ignore clustered environments, or you're having trouble with PM2 logging. Each worker process will do its own logging. Be careful with this if you're logging to files, weirdness can occur.
## Loggers - `log4js.getLogger([category])`
This function takes a single optional string argument to denote the category to be used for log events on this logger. If no category is specified, the events will be routed to the appender for the `default` category. The function returns a `Logger` object which has its level set to the level specified for that category in the config and implements the following functions:
* `<level>(args...)` - where `<level>` can be any of the lower case names of the levels (including any custom levels defined). For example: `logger.info('some info')` will dispatch a log event with a level of info. If you're using the basic, coloured or message pass-through [layouts](layouts.md), the logged string will have its formatting (placeholders like `%s`, `%d`, etc) delegated to [util.format](https://nodejs.org/api/util.html#util_util_format_format_args).
* `is<level>Enabled()` - returns true if a log event of level <level> (camel case) would be dispatched to the appender defined for the logger's category. For example: `logger.isInfoEnabled()` will return true if the level for the logger is INFO or lower.
* `addContext(<key>,<value>)` - where `<key>` is a string, `<value>` can be anything. This stores a key-value pair that is added to all log events generated by the logger. Uses would be to add ids for tracking a user through your application. Currently only the `logFaces` appenders make use of the context values.
* `removeContext(<key>)` - removes a previously defined key-value pair from the context.
* `clearContext()` - removes all context pairs from the logger.
* `setParseCallStackFunction(function)` - Allow to override the default way to parse the callstack data for the layout pattern, a generic javascript Error object is passed to the function. Must return an object with properties : `functionName` / `fileName` / `lineNumber` / `columnNumber` / `callStack`. Can for example be used if all of your log call are made from one "debug" class and you would to "erase" this class from the callstack to only show the function which called your "debug" class.
The `Logger` object has the following properties:
* `level` - where `level` is a log4js level or a string that matches a level (e.g. 'info', 'INFO', etc). This allows overriding the configured level for this logger. Changing this value applies to all loggers of the same category.
* `useCallStack` - where `useCallStack` is a boolean to indicate if log events for this category use the call stack to generate line numbers and file names in the event. This allows overriding the configured useCallStack for this logger. Changing this value applies to all loggers of the same category.
## Shutdown - `log4js.shutdown(cb)`
`shutdown` accepts a callback that will be called when log4js has closed all appenders and finished writing log events. Use this when your programme exits to make sure all your logs are written to files, sockets are closed, etc.
## Custom Layouts - `log4js.addLayout(type, fn)`
This function is used to add user-defined layout functions. See [layouts](layouts.md) for more details and an example.
| ## API
## configuration - `log4js.configure(object || string)`
There is one entry point for configuring log4js. A string argument is treated as a filename to load configuration from. Config files should be JSON, and contain a configuration object (see format below). You can also pass a configuration object directly to `configure`.
Configuration should take place immediately after requiring log4js for the first time in your application. If you do not call `configure`, log4js will use `LOG4JS_CONFIG` (if defined) or the default config. The default config defines one appender, which would log to stdout with the coloured layout, but also defines the default log level to be `OFF` - which means no logs will be output.
If you are using `cluster`, then include the call to `configure` in the worker processes as well as the master. That way the worker processes will pick up the right levels for your categories, and any custom levels you may have defined. Appenders will only be defined on the master process, so there is no danger of multiple processes attempting to write to the same appender. No special configuration is needed to use log4js with clusters, unlike previous versions.
Configuration objects must define at least one appender, and a default category. Log4js will throw an exception if the configuration is invalid.
`configure` method call returns the configured log4js object.
### Configuration Object
Properties:
* `levels` (optional, object) - used for defining custom log levels, or redefining existing ones; this is a map with the level name as the key (string, case insensitive), and an object as the value. The object should have two properties: the level value (integer) as the value, and the colour. Log levels are used to assign importance to log messages, with the integer value being used to sort them. If you do not specify anything in your configuration, the default values are used (ALL < TRACE < DEBUG < INFO < WARN < ERROR < FATAL < MARK < OFF - note that OFF is intended to be used to turn off logging, not as a level for actual logging, i.e. you would never call `logger.off('some log message')`). Levels defined here are used in addition to the default levels, with the integer value being used to determine their relation to the default levels. If you define a level with the same name as a default level, then the integer value in the config takes precedence. Level names must begin with a letter, and can only contain letters, numbers and underscores.
* `appenders` (object) - a map of named appenders (string) to appender definitions (object); appender definitions must have a property `type` (string) - other properties depend on the appender type.
* `categories` (object) - a map of named categories (string) to category definitions (object). You must define the `default` category which is used for all log events that do not match a specific category. Category definitions have two properties:
* `appenders` (array of strings) - the list of appender names to be used for this category. A category must have at least one appender.
* `level` (string, case insensitive) - the minimum log level that this category will send to the appenders. For example, if set to 'error' then the appenders will only receive log events of level 'error', 'fatal', 'mark' - log events of 'info', 'warn', 'debug', or 'trace' will be ignored.
* `enableCallStack` (boolean, optional, defaults to `false`) - setting this to `true` will make log events for this category use the call stack to generate line numbers and file names in the event. See [pattern layout](layouts.md) for how to output these values in your appenders.
* `pm2` (boolean) (optional) - set this to true if you're running your app using [pm2](http://pm2.keymetrics.io), otherwise logs will not work (you'll also need to install pm2-intercom as pm2 module: `pm2 install pm2-intercom`)
* `pm2InstanceVar` (string) (optional, defaults to 'NODE_APP_INSTANCE') - set this if you're using pm2 and have changed the default name of the NODE_APP_INSTANCE variable.
* `disableClustering` (boolean) (optional) - set this to true if you liked the way log4js used to just ignore clustered environments, or you're having trouble with PM2 logging. Each worker process will do its own logging. Be careful with this if you're logging to files, weirdness can occur.
## Loggers - `log4js.getLogger([category])`
This function takes a single optional string argument to denote the category to be used for log events on this logger. If no category is specified, the events will be routed to the appender for the `default` category. The function returns a `Logger` object which has its level set to the level specified for that category in the config and implements the following functions:
* `<level>(args...)` - where `<level>` can be any of the lower case names of the levels (including any custom levels defined). For example: `logger.info('some info')` will dispatch a log event with a level of info. If you're using the basic, coloured or message pass-through [layouts](layouts.md), the logged string will have its formatting (placeholders like `%s`, `%d`, etc) delegated to [util.format](https://nodejs.org/api/util.html#util_util_format_format_args).
* `is<level>Enabled()` - returns true if a log event of level <level> (camel case) would be dispatched to the appender defined for the logger's category. For example: `logger.isInfoEnabled()` will return true if the level for the logger is INFO or lower.
* `addContext(<key>,<value>)` - where `<key>` is a string, `<value>` can be anything. This stores a key-value pair that is added to all log events generated by the logger. Uses would be to add ids for tracking a user through your application. Currently only the `logFaces` appenders make use of the context values.
* `removeContext(<key>)` - removes a previously defined key-value pair from the context.
* `clearContext()` - removes all context pairs from the logger.
* `setParseCallStackFunction(function)` - Allow to override the default way to parse the callstack data for the layout pattern, a generic javascript Error object is passed to the function. Must return an object with properties : `functionName` / `fileName` / `lineNumber` / `columnNumber` / `callStack`. Can for example be used if all of your log call are made from one "debug" class and you would to "erase" this class from the callstack to only show the function which called your "debug" class.
The `Logger` object has the following properties:
* `level` - where `level` is a log4js level or a string that matches a level (e.g. 'info', 'INFO', etc). This allows overriding the configured level for this logger. Changing this value applies to all loggers of the same category.
* `useCallStack` - where `useCallStack` is a boolean to indicate if log events for this category use the call stack to generate line numbers and file names in the event. This allows overriding the configured useCallStack for this logger. Changing this value applies to all loggers of the same category.
## Shutdown - `log4js.shutdown(cb)`
`shutdown` accepts a callback that will be called when log4js has closed all appenders and finished writing log events. Use this when your programme exits to make sure all your logs are written to files, sockets are closed, etc.
## Custom Layouts - `log4js.addLayout(type, fn)`
This function is used to add user-defined layout functions. See [layouts](layouts.md) for more details and an example.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/contrib-guidelines.md | # Want to help?
I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged:
* Fork the repo, make a feature branch just for your changes
* On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features.
* Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. Plus it's always nice to know that your changes work :-)
* Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then).
| # Want to help?
I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged:
* Fork the repo, make a feature branch just for your changes
* On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features.
* Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. Plus it's always nice to know that your changes work :-)
* Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then).
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/multiprocess.md | # Multiprocess Appender
*You probably want to use the [tcp server](tcp-server.md) or [tcp appender](tcp.md) instead of this - they are more flexible*
*Note that if you're just using node core's `cluster` module then you don't need to use this appender - log4js will handle logging within the cluster transparently.*
The multiprocess appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly.
## Configuration
* `type` - `multiprocess`
* `mode` - `master|worker` - controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
* `appender` - `string` (only needed if `mode` == `master`)- the name of the appender to send the log events to
* `loggerPort` - `integer` (optional, defaults to `5000`) - the port to listen on, or send to
* `loggerHost` - `string` (optional, defaults to `localhost`) - the host/IP address to listen on, or send to
## Example (master)
```javascript
log4js.configure({
appenders: {
file: { type: 'file', filename: 'all-the-logs.log' },
server: { type: 'multiprocess', mode: 'master', appender: 'file', loggerHost: '0.0.0.0' }
},
categories: {
default: { appenders: ['file'], level: 'info' }
}
});
```
This creates a log server listening on port 5000, on all IP addresses the host has assigned to it. Note that the appender is not included in the appenders listed for the categories. Also note that the multiprocess master appender will send every event it receives to the underlying appender, regardless of level settings.
## Example (worker)
```javascript
log4js.configure({
appenders: {
network: { type: 'multiprocess', mode: 'worker', loggerHost: 'log.server' }
},
categories: {
default: { appenders: ['network'], level: 'error' }
}
});
```
This will send all error messages to `log.server:5000`.
| # Multiprocess Appender
*You probably want to use the [tcp server](tcp-server.md) or [tcp appender](tcp.md) instead of this - they are more flexible*
*Note that if you're just using node core's `cluster` module then you don't need to use this appender - log4js will handle logging within the cluster transparently.*
The multiprocess appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly.
## Configuration
* `type` - `multiprocess`
* `mode` - `master|worker` - controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
* `appender` - `string` (only needed if `mode` == `master`)- the name of the appender to send the log events to
* `loggerPort` - `integer` (optional, defaults to `5000`) - the port to listen on, or send to
* `loggerHost` - `string` (optional, defaults to `localhost`) - the host/IP address to listen on, or send to
## Example (master)
```javascript
log4js.configure({
appenders: {
file: { type: 'file', filename: 'all-the-logs.log' },
server: { type: 'multiprocess', mode: 'master', appender: 'file', loggerHost: '0.0.0.0' }
},
categories: {
default: { appenders: ['file'], level: 'info' }
}
});
```
This creates a log server listening on port 5000, on all IP addresses the host has assigned to it. Note that the appender is not included in the appenders listed for the categories. Also note that the multiprocess master appender will send every event it receives to the underlying appender, regardless of level settings.
## Example (worker)
```javascript
log4js.configure({
appenders: {
network: { type: 'multiprocess', mode: 'worker', loggerHost: 'log.server' }
},
categories: {
default: { appenders: ['network'], level: 'error' }
}
});
```
This will send all error messages to `log.server:5000`.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./lib/connect-logger.js | /* eslint no-underscore-dangle: ["error", { "allow": ["__statusCode", "_remoteAddress", "__headers", "_logging"] }] */
const levels = require("./levels");
const DEFAULT_FORMAT =
":remote-addr - -" +
' ":method :url HTTP/:http-version"' +
' :status :content-length ":referrer"' +
' ":user-agent"';
/**
* Return request url path,
* adding this function prevents the Cyclomatic Complexity,
* for the assemble_tokens function at low, to pass the tests.
*
* @param {IncomingMessage} req
* @return {string}
* @api private
*/
function getUrl(req) {
return req.originalUrl || req.url;
}
/**
* Adds custom {token, replacement} objects to defaults,
* overwriting the defaults if any tokens clash
*
* @param {IncomingMessage} req
* @param {ServerResponse} res
* @param {Array} customTokens
* [{ token: string-or-regexp, replacement: string-or-replace-function }]
* @return {Array}
*/
function assembleTokens(req, res, customTokens) {
const arrayUniqueTokens = array => {
const a = array.concat();
for (let i = 0; i < a.length; ++i) {
for (let j = i + 1; j < a.length; ++j) {
// not === because token can be regexp object
// eslint-disable-next-line eqeqeq
if (a[i].token == a[j].token) {
a.splice(j--, 1); // eslint-disable-line no-plusplus
}
}
}
return a;
};
const defaultTokens = [];
defaultTokens.push({ token: ":url", replacement: getUrl(req) });
defaultTokens.push({ token: ":protocol", replacement: req.protocol });
defaultTokens.push({ token: ":hostname", replacement: req.hostname });
defaultTokens.push({ token: ":method", replacement: req.method });
defaultTokens.push({
token: ":status",
replacement: res.__statusCode || res.statusCode
});
defaultTokens.push({
token: ":response-time",
replacement: res.responseTime
});
defaultTokens.push({ token: ":date", replacement: new Date().toUTCString() });
defaultTokens.push({
token: ":referrer",
replacement: req.headers.referer || req.headers.referrer || ""
});
defaultTokens.push({
token: ":http-version",
replacement: `${req.httpVersionMajor}.${req.httpVersionMinor}`
});
defaultTokens.push({
token: ":remote-addr",
replacement:
req.headers["x-forwarded-for"] ||
req.ip ||
req._remoteAddress ||
(req.socket &&
(req.socket.remoteAddress ||
(req.socket.socket && req.socket.socket.remoteAddress)))
});
defaultTokens.push({
token: ":user-agent",
replacement: req.headers["user-agent"]
});
defaultTokens.push({
token: ":content-length",
replacement:
res.getHeader("content-length") ||
(res.__headers && res.__headers["Content-Length"]) ||
"-"
});
defaultTokens.push({
token: /:req\[([^\]]+)]/g,
replacement(_, field) {
return req.headers[field.toLowerCase()];
}
});
defaultTokens.push({
token: /:res\[([^\]]+)]/g,
replacement(_, field) {
return (
res.getHeader(field.toLowerCase()) ||
(res.__headers && res.__headers[field])
);
}
});
return arrayUniqueTokens(customTokens.concat(defaultTokens));
}
/**
* Return formatted log line.
*
* @param {string} str
* @param {Array} tokens
* @return {string}
* @api private
*/
function format(str, tokens) {
for (let i = 0; i < tokens.length; i++) {
str = str.replace(tokens[i].token, tokens[i].replacement);
}
return str;
}
/**
* Return RegExp Object about nolog
*
* @param {(string|Array)} nolog
* @return {RegExp}
* @api private
*
* syntax
* 1. String
* 1.1 "\\.gif"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
* LOGGING http://example.com/hoge.agif
* 1.2 in "\\.gif|\\.jpg$"
* NOT LOGGING http://example.com/hoge.gif and
* http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
* LOGGING http://example.com/hoge.agif,
* http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
* 1.3 in "\\.(gif|jpe?g|png)$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
* LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
* 2. RegExp
* 2.1 in /\.(gif|jpe?g|png)$/
* SAME AS 1.3
* 3. Array
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
* SAME AS "\\.jpg|\\.png|\\.gif"
*/
function createNoLogCondition(nolog) {
let regexp = null;
if (nolog instanceof RegExp) {
regexp = nolog;
}
if (typeof nolog === "string") {
regexp = new RegExp(nolog);
}
if (Array.isArray(nolog)) {
// convert to strings
const regexpsAsStrings = nolog.map(reg => (reg.source ? reg.source : reg));
regexp = new RegExp(regexpsAsStrings.join("|"));
}
return regexp;
}
/**
* Allows users to define rules around status codes to assign them to a specific
* logging level.
* There are two types of rules:
* - RANGE: matches a code within a certain range
* E.g. { 'from': 200, 'to': 299, 'level': 'info' }
* - CONTAINS: matches a code to a set of expected codes
* E.g. { 'codes': [200, 203], 'level': 'debug' }
* Note*: Rules are respected only in order of prescendence.
*
* @param {Number} statusCode
* @param {Level} currentLevel
* @param {Object} ruleSet
* @return {Level}
* @api private
*/
function matchRules(statusCode, currentLevel, ruleSet) {
let level = currentLevel;
if (ruleSet) {
const matchedRule = ruleSet.find(rule => {
let ruleMatched = false;
if (rule.from && rule.to) {
ruleMatched = statusCode >= rule.from && statusCode <= rule.to;
} else {
ruleMatched = rule.codes.indexOf(statusCode) !== -1;
}
return ruleMatched;
});
if (matchedRule) {
level = levels.getLevel(matchedRule.level, level);
}
}
return level;
}
/**
* Log requests with the given `options` or a `format` string.
*
* Options:
*
* - `format` Format string, see below for tokens
* - `level` A log4js levels instance. Supports also 'auto'
* - `nolog` A string or RegExp to exclude target logs
* - `statusRules` A array of rules for setting specific logging levels base on status codes
* - `context` Whether to add a response of express to the context
*
* Tokens:
*
* - `:req[header]` ex: `:req[Accept]`
* - `:res[header]` ex: `:res[Content-Length]`
* - `:http-version`
* - `:response-time`
* - `:remote-addr`
* - `:date`
* - `:method`
* - `:url`
* - `:referrer`
* - `:user-agent`
* - `:status`
*
* @return {Function}
* @param logger4js
* @param options
* @api public
*/
module.exports = function getLogger(logger4js, options) {
if (typeof options === "string" || typeof options === "function") {
options = { format: options };
} else {
options = options || {};
}
const thisLogger = logger4js;
let level = levels.getLevel(options.level, levels.INFO);
const fmt = options.format || DEFAULT_FORMAT;
const nolog = createNoLogCondition(options.nolog);
return (req, res, next) => {
// mount safety
if (req._logging) return next();
// nologs
if (nolog && nolog.test(req.originalUrl)) return next();
if (thisLogger.isLevelEnabled(level) || options.level === "auto") {
const start = new Date();
const { writeHead } = res;
// flag as logging
req._logging = true;
// proxy for statusCode.
res.writeHead = (code, headers) => {
res.writeHead = writeHead;
res.writeHead(code, headers);
res.__statusCode = code;
res.__headers = headers || {};
};
// hook on end request to emit the log entry of the HTTP request.
let finished = false;
const handler = () => {
if (finished) {
return;
}
finished = true;
res.responseTime = new Date() - start;
// status code response level handling
if (res.statusCode && options.level === "auto") {
level = levels.INFO;
if (res.statusCode >= 300) level = levels.WARN;
if (res.statusCode >= 400) level = levels.ERROR;
}
level = matchRules(res.statusCode, level, options.statusRules);
const combinedTokens = assembleTokens(req, res, options.tokens || []);
if (options.context) thisLogger.addContext("res", res);
if (typeof fmt === "function") {
const line = fmt(req, res, str => format(str, combinedTokens));
if (line) thisLogger.log(level, line);
} else {
thisLogger.log(level, format(fmt, combinedTokens));
}
if (options.context) thisLogger.removeContext("res");
};
res.on("end", handler);
res.on("finish", handler);
res.on("error", handler);
res.on("close", handler);
}
// ensure next gets always called
return next();
};
};
| /* eslint no-underscore-dangle: ["error", { "allow": ["__statusCode", "_remoteAddress", "__headers", "_logging"] }] */
const levels = require("./levels");
const DEFAULT_FORMAT =
":remote-addr - -" +
' ":method :url HTTP/:http-version"' +
' :status :content-length ":referrer"' +
' ":user-agent"';
/**
* Return request url path,
* adding this function prevents the Cyclomatic Complexity,
* for the assemble_tokens function at low, to pass the tests.
*
* @param {IncomingMessage} req
* @return {string}
* @api private
*/
function getUrl(req) {
return req.originalUrl || req.url;
}
/**
* Adds custom {token, replacement} objects to defaults,
* overwriting the defaults if any tokens clash
*
* @param {IncomingMessage} req
* @param {ServerResponse} res
* @param {Array} customTokens
* [{ token: string-or-regexp, replacement: string-or-replace-function }]
* @return {Array}
*/
function assembleTokens(req, res, customTokens) {
const arrayUniqueTokens = array => {
const a = array.concat();
for (let i = 0; i < a.length; ++i) {
for (let j = i + 1; j < a.length; ++j) {
// not === because token can be regexp object
// eslint-disable-next-line eqeqeq
if (a[i].token == a[j].token) {
a.splice(j--, 1); // eslint-disable-line no-plusplus
}
}
}
return a;
};
const defaultTokens = [];
defaultTokens.push({ token: ":url", replacement: getUrl(req) });
defaultTokens.push({ token: ":protocol", replacement: req.protocol });
defaultTokens.push({ token: ":hostname", replacement: req.hostname });
defaultTokens.push({ token: ":method", replacement: req.method });
defaultTokens.push({
token: ":status",
replacement: res.__statusCode || res.statusCode
});
defaultTokens.push({
token: ":response-time",
replacement: res.responseTime
});
defaultTokens.push({ token: ":date", replacement: new Date().toUTCString() });
defaultTokens.push({
token: ":referrer",
replacement: req.headers.referer || req.headers.referrer || ""
});
defaultTokens.push({
token: ":http-version",
replacement: `${req.httpVersionMajor}.${req.httpVersionMinor}`
});
defaultTokens.push({
token: ":remote-addr",
replacement:
req.headers["x-forwarded-for"] ||
req.ip ||
req._remoteAddress ||
(req.socket &&
(req.socket.remoteAddress ||
(req.socket.socket && req.socket.socket.remoteAddress)))
});
defaultTokens.push({
token: ":user-agent",
replacement: req.headers["user-agent"]
});
defaultTokens.push({
token: ":content-length",
replacement:
res.getHeader("content-length") ||
(res.__headers && res.__headers["Content-Length"]) ||
"-"
});
defaultTokens.push({
token: /:req\[([^\]]+)]/g,
replacement(_, field) {
return req.headers[field.toLowerCase()];
}
});
defaultTokens.push({
token: /:res\[([^\]]+)]/g,
replacement(_, field) {
return (
res.getHeader(field.toLowerCase()) ||
(res.__headers && res.__headers[field])
);
}
});
return arrayUniqueTokens(customTokens.concat(defaultTokens));
}
/**
* Return formatted log line.
*
* @param {string} str
* @param {Array} tokens
* @return {string}
* @api private
*/
function format(str, tokens) {
for (let i = 0; i < tokens.length; i++) {
str = str.replace(tokens[i].token, tokens[i].replacement);
}
return str;
}
/**
* Return RegExp Object about nolog
*
* @param {(string|Array)} nolog
* @return {RegExp}
* @api private
*
* syntax
* 1. String
* 1.1 "\\.gif"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
* LOGGING http://example.com/hoge.agif
* 1.2 in "\\.gif|\\.jpg$"
* NOT LOGGING http://example.com/hoge.gif and
* http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
* LOGGING http://example.com/hoge.agif,
* http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
* 1.3 in "\\.(gif|jpe?g|png)$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
* LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
* 2. RegExp
* 2.1 in /\.(gif|jpe?g|png)$/
* SAME AS 1.3
* 3. Array
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
* SAME AS "\\.jpg|\\.png|\\.gif"
*/
function createNoLogCondition(nolog) {
let regexp = null;
if (nolog instanceof RegExp) {
regexp = nolog;
}
if (typeof nolog === "string") {
regexp = new RegExp(nolog);
}
if (Array.isArray(nolog)) {
// convert to strings
const regexpsAsStrings = nolog.map(reg => (reg.source ? reg.source : reg));
regexp = new RegExp(regexpsAsStrings.join("|"));
}
return regexp;
}
/**
* Allows users to define rules around status codes to assign them to a specific
* logging level.
* There are two types of rules:
* - RANGE: matches a code within a certain range
* E.g. { 'from': 200, 'to': 299, 'level': 'info' }
* - CONTAINS: matches a code to a set of expected codes
* E.g. { 'codes': [200, 203], 'level': 'debug' }
* Note*: Rules are respected only in order of prescendence.
*
* @param {Number} statusCode
* @param {Level} currentLevel
* @param {Object} ruleSet
* @return {Level}
* @api private
*/
function matchRules(statusCode, currentLevel, ruleSet) {
let level = currentLevel;
if (ruleSet) {
const matchedRule = ruleSet.find(rule => {
let ruleMatched = false;
if (rule.from && rule.to) {
ruleMatched = statusCode >= rule.from && statusCode <= rule.to;
} else {
ruleMatched = rule.codes.indexOf(statusCode) !== -1;
}
return ruleMatched;
});
if (matchedRule) {
level = levels.getLevel(matchedRule.level, level);
}
}
return level;
}
/**
* Log requests with the given `options` or a `format` string.
*
* Options:
*
* - `format` Format string, see below for tokens
* - `level` A log4js levels instance. Supports also 'auto'
* - `nolog` A string or RegExp to exclude target logs
* - `statusRules` A array of rules for setting specific logging levels base on status codes
* - `context` Whether to add a response of express to the context
*
* Tokens:
*
* - `:req[header]` ex: `:req[Accept]`
* - `:res[header]` ex: `:res[Content-Length]`
* - `:http-version`
* - `:response-time`
* - `:remote-addr`
* - `:date`
* - `:method`
* - `:url`
* - `:referrer`
* - `:user-agent`
* - `:status`
*
* @return {Function}
* @param logger4js
* @param options
* @api public
*/
module.exports = function getLogger(logger4js, options) {
if (typeof options === "string" || typeof options === "function") {
options = { format: options };
} else {
options = options || {};
}
const thisLogger = logger4js;
let level = levels.getLevel(options.level, levels.INFO);
const fmt = options.format || DEFAULT_FORMAT;
const nolog = createNoLogCondition(options.nolog);
return (req, res, next) => {
// mount safety
if (req._logging) return next();
// nologs
if (nolog && nolog.test(req.originalUrl)) return next();
if (thisLogger.isLevelEnabled(level) || options.level === "auto") {
const start = new Date();
const { writeHead } = res;
// flag as logging
req._logging = true;
// proxy for statusCode.
res.writeHead = (code, headers) => {
res.writeHead = writeHead;
res.writeHead(code, headers);
res.__statusCode = code;
res.__headers = headers || {};
};
// hook on end request to emit the log entry of the HTTP request.
let finished = false;
const handler = () => {
if (finished) {
return;
}
finished = true;
res.responseTime = new Date() - start;
// status code response level handling
if (res.statusCode && options.level === "auto") {
level = levels.INFO;
if (res.statusCode >= 300) level = levels.WARN;
if (res.statusCode >= 400) level = levels.ERROR;
}
level = matchRules(res.statusCode, level, options.statusRules);
const combinedTokens = assembleTokens(req, res, options.tokens || []);
if (options.context) thisLogger.addContext("res", res);
if (typeof fmt === "function") {
const line = fmt(req, res, str => format(str, combinedTokens));
if (line) thisLogger.log(level, line);
} else {
thisLogger.log(level, format(fmt, combinedTokens));
}
if (options.context) thisLogger.removeContext("res");
};
res.on("end", handler);
res.on("finish", handler);
res.on("error", handler);
res.on("close", handler);
}
// ensure next gets always called
return next();
};
};
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./.git/packed-refs | # pack-refs with: peeled fully-peeled sorted
3b31485091202b7118b15275cb33d200cb923bec refs/remotes/origin/1388-invalid-location-type-passed-to-loggingevent-constructor
2b889fe7764fb2e10d7bd1f0bfc28c1561e155a4 refs/remotes/origin/date-rolling-file-appender
3167185b8559a94f67c3d6262e4168f9fb0425b0 refs/remotes/origin/dependabot/npm_and_yarn/babel/traverse-7.23.2
312dbc446cc6b68929d4057202bdf7d9c4b90c88 refs/remotes/origin/dependabot/npm_and_yarn/react-devtools-core-4.28.4
449db3d24390c648cc0b58b935be8a1b60f13a06 refs/remotes/origin/error-handling
e337bcdb8fd78fe4bd556c2eb5fa72360196f8de refs/remotes/origin/feat/filters-to-allow-multi-appenders
6aacb0da0b93daaa60530a6983660bd1b4915a20 refs/remotes/origin/flush-on-exit
936ad4da8e8c51be12fd050b01e8b3721d6f576d refs/remotes/origin/isaacg-alwaysIncludePattern
a16100ba7258da80c874de3cac9c8556b950125b refs/remotes/origin/logger-decoupling
bd457888eb91b9e932fe8f66d720cf2d9d6442f4 refs/remotes/origin/master
9e8e1f76ad3243ad578123dedef116146bcfabcd refs/remotes/origin/node-0.8-backport
8b49ba9f3d84dd209323e2ad77c3d3898c3fd42e refs/remotes/origin/node-0.8-readable-stream
7d4fdce28f7f1fdb160198283102668fdf6f9dae refs/remotes/origin/refactoring
67b19aeaf301a1c3eb9dbfaf25f580948d85a284 refs/remotes/origin/release-0.5.7-fixup
6352632fb24c3935b73fc8a604cb9f165cd71daf refs/remotes/origin/release-0.6.0-fixup
1fb4f2f8723e6837dc3269970841184684238f74 refs/remotes/origin/streamroller-hotfix
800f0d6bf670bb5434530a3f3734887eb9b18227 refs/remotes/origin/unstable
d80368bf9c7c499a1c68a2c5470b1ee7009560e2 refs/tags/v0.5.0
^ad7e844d6867c49f996cc88a15e5988448db94fe
c7dbe783dddf3e2e25fc248816dbf3b27a194587 refs/tags/v0.5.1
^e3a20a17462fa95fd11306f801014fac3ef0da71
012b0d5ed76995229d09bb1ff274ed058ddeba5d refs/tags/v0.5.2
2bfad6362a07b4bc51f8f63c60fb8ddddd73ceaa refs/tags/v0.5.3
4739c65c684dc9b568f34df3cd656ac800bba1b2 refs/tags/v0.5.4
a9307fd6dababf8a4dbd7e8317c7fd64b57ce16e refs/tags/v0.5.5
8b42e46071a06e2402185af83a88b6c46ec8ae3c refs/tags/v0.5.6
50eefcc70196fc05b235bb516aeaea631362f354 refs/tags/v0.5.7
ecbf41bc8310d433cd898e4a9c9e9fca14f6b8dc refs/tags/v0.6.0
50839262fb823c71230a3adb376e4f3dccabcaa7 refs/tags/v0.6.10
^72bfb5d9809ff8fbbd6ea6ef525ff0618975280a
1aa9a003a48a476c7b9ac0bd79003d5266acede1 refs/tags/v0.6.11
^cd2ee14bde58c9a9a23f9f59fb1d2c1338f736aa
e8caf912cc879d1a18dd2861a0beebff68bff8c8 refs/tags/v0.6.12
^492919b940edd0508073c1e07223da37348e44c4
219cceac78920bb173920ac15621899b9324cf2e refs/tags/v0.6.13
^0c2baa969016fa9d90b605dc616b72347243273d
852856a6f4b9ed322fcaedea3c56c7f504d31c50 refs/tags/v0.6.14
^ca5272aacc714bb4e5a01e6f5ce66564ef076237
702ad21bf5e3ca025550f16a6c3472e815ada315 refs/tags/v0.6.15
^a703f2dc1284a39052f69a430789ea34cf2f5c01
40f38365d62e4cc3c1750b54ece677c05e919294 refs/tags/v0.6.16
^eaa77b045465f2eb8ae30b5a91232b4719315249
7d52ec2e7cc2e8622dd0c4ff43b5a2943f05742c refs/tags/v0.6.17
^fb9948145cb75378c71835d2efe305f505e4c339
3f521f7c9a72829ed65de50fc6041c13eb54808a refs/tags/v0.6.18
^ab778955551458b7c354607e630df8b0411e03d5
49006ce2210cc6815c24ed68d1279d3dd834d4ce refs/tags/v0.6.19
^ae04cc9a4a5af5897b1edee56e460dd86f540e82
86077966a59e2a96037ebfb52f505dcd2788911b refs/tags/v0.6.2
^36c5175a5530d2fb2b1094d3ea61f27839776f0b
84af2b656f46340afd572625d9613b22fc3905f3 refs/tags/v0.6.20
^fb072dd70d0b35e541754c14f6e3e0b2041575ac
0cf30afd72a8b2d3be68c8c8329ee9d6b7c50e9d refs/tags/v0.6.21
^176d44833e4c09bfd64dbcee11e083f436cb1dc5
4778dee3fd14fd2399e7b4af1d8719681761ab05 refs/tags/v0.6.22
^35067af5504ff10254ae0b086d856f7a39bb0c40
aeefc86a55786476451c34266735c03f03d2a7e3 refs/tags/v0.6.23
^1cdd37c4882e2c725c264294f516c672170e26bc
a1fec8df2796ddb227255df700f0be03757fca42 refs/tags/v0.6.24
^9fe32d06e30680b31a558fd96ed3bfe048da3051
e937543ee60fc11f783f5bc177c6533d9c63e0ff refs/tags/v0.6.25
^8dff114b4935a051e09115c89bec87aac095db15
6aca3d908437398a715fd1d2dc985b906c0aec70 refs/tags/v0.6.26
^fc4cdea50dcc95b5827e7e01390fe648a1206391
6eefe61f16a15d3a3393302cc4fddaabf3aa5bc9 refs/tags/v0.6.27
^70752c5c4f4230adc46d1086d5d32810a36e60b2
dbe71c92867e44643a5c51dab65deec1b3abeb8f refs/tags/v0.6.28
^37ccd24fd0bdd41e293e76efa90e3f5da133dc1e
2219452c97e32c3089e7be0be2ec9f43b60e2e42 refs/tags/v0.6.29
^033e52a0db9fc952ab8e5b0e56854086e9a1cdf2
c68286f3ba3526b41763b4dfd412e43ab13f7c6f refs/tags/v0.6.3
^7844b0d2e42d398af7579ac116ab03a74c4283f3
fec3d9e6e3aab5eea370d559a87efed3e4449c8a refs/tags/v0.6.30
^55c516740c67f7f370802166b45aadf6c5f359bc
a01113fd60c9e85494780ed9eabdc28bcc1854e6 refs/tags/v0.6.31
^0f8bffda210b784c885314b7a4d8fa3ab0968be2
534595c43f17ce030f308876dcb0fa25c19b42db refs/tags/v0.6.32
^b1a36cbe7603563b4a3c0eecfd75cc6558832c99
b5f7e2794368d6d24805d18cd41fc0cc4c0c46e7 refs/tags/v0.6.33
^111ba283e91c7fb8a2f3ded86560f3a52659a4b7
4c095a303e9b05f1949340651a63f6a0580350b2 refs/tags/v0.6.34
^451f79a29a1661910f17f5e0588ddd18491565cb
40fd6f3d18edb77a0a0c091290452e4bc906bdcf refs/tags/v0.6.35
^a1433e2316d7ea0db87fccfac0efa6fee6e6497e
9ac43bb954781bf6c07e90fd06d0ebb858325f6c refs/tags/v0.6.36
^5d9e08a28942ae05a58f82fefdf9aa14315cd6b2
0981fc1fdd5f08529f2751381f9dd04fdce1b525 refs/tags/v0.6.37
^4e0a71865eb1f589b6a1c4017f080222dfa32626
1f137a74bc66456e2e31cc81bb563037a92713c9 refs/tags/v0.6.38
^6c21e4acd90047525d34ac4f4740e8ee0dba3bc2
0943f3ced3baf6abffdcb56920dcec9953cc8c39 refs/tags/v0.6.4
^af6ae7af9883bdf90de55ac133ed41f1077cb691
2e9d19fa1ad804ae024dd98f439d02e471e677f2 refs/tags/v0.6.5
^fdc9d253c9195bd704940be79355d053d70112f5
c6d9519d9790ebdd9dbf3c11615781ac756bd635 refs/tags/v0.6.6
^8383dfc4f4e7785fb6d8f5b3f37be5ddd4a36f5b
7206c9d63dc89cc7a26c82c59443bcee3e8a90ec refs/tags/v0.6.7
^1e17f88ded8b72edeea008dd0566fccfb6456377
f72ab747e21fabf1423eeb1093cc0d589a19470d refs/tags/v0.6.8
^3018a49bde08939f71606a254597721cae0ebe5b
0e1f73c5e5f572801f14f63ff28ee252f89000ef refs/tags/v0.6.9
^d2f044a45107497f0774cf6aeb0f499be5f0d33c
2efa9f373d5e476b4f15b302aa9cc92fbcbcd26e refs/tags/v1.0.1
^e4f196a6823308168c917567b5e456a67c1ea699
af5d50d76ce5a4b7fe41f8fccbedfed17d323249 refs/tags/v1.1.0
^f7ec3ccf82dd9eda1fd617559f94373e42399c68
e4f8105ccc99539452ed4c467304501413dd24a0 refs/tags/v1.1.1
^1fb4f2f8723e6837dc3269970841184684238f74
e12bd00dd831d28513caadf504b413c8c1ac159f refs/tags/v2.0.0
^06cb8c680d12831b0c7d6524bd98220f9f329c04
63d3813b339bc9829701962379f10716d019629c refs/tags/v2.0.1
^97f45edf0d143312cdec73e5476e33166618ddff
37b71663a00ada3ab70790f4d16e227f51580f10 refs/tags/v2.1.0
^558d94db9b880908430fd4712a8d4809eb5dfb4d
3bb6d1516dab9a9684bf1ec213562e3f6e86860a refs/tags/v2.10.0
^72c453a741b0eef63f00d74fec962ec0a1bc8489
91bcd410b52e5fb753fbedbb77553c161fb5f311 refs/tags/v2.11.0
^8ad2c39b7887af765855cd561e1ec053e60e66a4
72d19f9797cc6ce739e45e3e63313c262a11a681 refs/tags/v2.2.0
^c2f1d7bf5d7328453cc74d92858470afe5adc07b
dd313c78ec94690b4d52cb7a39c7672f23758219 refs/tags/v2.3.0
^c8f6bff9f3925d520d6a4517dc4914d40b1417a3
8a6ba2449895729ff67df2e50108b61f2f67a575 refs/tags/v2.3.1
^9b0dff30336534ecab2e15e059bf405ba95eb8a2
444a8914ff965d699b6e7e0bab82d2af33ba942a refs/tags/v2.3.10
^9847c8ae8108f4e0debf2260f698b20b599c337b
07a5bab61a29707078a5cf2ac7185fd214f5006d refs/tags/v2.3.11
^7bf6af872422018d12fa8fcbaf73bab066cd071b
377270464a4491a76d1f25be7a9c902e56c23d2f refs/tags/v2.3.12
^b65871a4abded4456b9e243725099f35acc6070b
973004d78857139409f95682eaada20cc694a98e refs/tags/v2.3.2
^d9fcd3f165f21525114fcdefa49fc7261ab36573
2072a873ea829c3f1008579d75704490fa53ed3e refs/tags/v2.3.3
^9c19fd5e8e37341f0a67ba0c726bf3996e67d2ee
a86661756f6e96230458edd7ba150c7bb87e1d9b refs/tags/v2.3.4
^31e4e01e6b007386473b14f17dde562c20e0050a
27ee6afb58e0f1a075102e91ed5ab2e3e0f4dc8c refs/tags/v2.3.5
^f7637dfbf9b0494a7310f1e477e71ed6f07d88d1
6f90cef2f2cc6bc539014af3d85f78a7df25fbce refs/tags/v2.3.6
^15c35ef281b1668dd79459a8d8a759b1bfdc2dd7
31eea60f922771ecb0cb52cec1d191a62b52e53e refs/tags/v2.3.7
^edd07f23fb55a1510667600079831d691e6e0664
bb76f63df167addf8b56de1fa9c2666b9726d0c9 refs/tags/v2.3.8
^b5101160afb97c94bc1c7f6f8de9b37f02dee852
eb30515da2dcca800f719ba62b0de8672af45a2d refs/tags/v2.3.9
^c1c58e3bf39cb7d0953e8631b69f56a7307b6a2d
39271a33bc3d7a4eaccdb5e86c4c045ccc3b89cf refs/tags/v2.4.0
^d5687b357febfe2f2b4d245f4c29ba2d92f314f8
b2a5577d6e2511ebb84e169a140058ef26c19ae4 refs/tags/v2.4.1
^10dac6d6fd3612da730f07bd2bcf7b0f28ba3541
75add43091bfba93d017022871f51c33749574d1 refs/tags/v2.5.0
^b3db83eefa7ea81e8ce599f19752e3626933ee25
55aaa9d201ac698172f1ad334cec03f256564141 refs/tags/v2.5.1
^0078badaddfe8696201bb489c79a18a7ce492aae
b55f1b189332b2074ea68c892e6aceec75cb76c4 refs/tags/v2.5.2
^822ca2e385e7ecdd0c0fa68bb1c9fa58936cf5df
9516bf71e270783dd4a38e5581fdefdb8f448e6d refs/tags/v2.5.3
^d0b630aeeffc2fa0cc88e26b03a0a9cbf0658ba5
5e27721515ddbc7d59ec1bbaadcb4f8658555c20 refs/tags/v2.6.0
^6598898c4c6397698811cd3dd3b45c6b88d5c615
90a558c3844b93988b22427d6727f137f65d4a45 refs/tags/v2.6.1
^26d18314c1908d006c1271aaa8de380c6f21b554
6ec79a9bf0dab61ef515eb82a1ae5381144843b3 refs/tags/v2.7.0
^b2f420cca945667a3b1384e4dee2a6954e293c52
b3d62d6030f02236c7167518d259019315e2a2c3 refs/tags/v2.8.0
^64ef14a7486c018fe0237470c65317639b8b23fd
97e802e703da271c255052bcc8c083108cbd2d0c refs/tags/v2.9.0
^0a7c0f76449861e809ce2410d0e247151378de8f
b7f7c502987eafe39a646b89d5b1598ed5899e88 refs/tags/v3.0.0
^45eca6943b039a3ff8c84ef9fe926b59567f2df2
6e9472f5a5f707468623089d0652623cf36c37a4 refs/tags/v3.0.1
^c7e9b06edf1bf7e53064c1f7062d598c40d35a8d
bc4e169d983ef7cd34b3a6215d201de79fcdfc56 refs/tags/v3.0.2
^36494a6f32c31f8b84c28ee45a455c3a2e2e2b2f
f7b788398d0e7d632dec7fb844f0e54eaac0b496 refs/tags/v3.0.3
^07e3b0bbe0af0b8454e7ebf47804b3ede1935de4
b2d00a0aa962fb0ea2f895070660050e24d48a78 refs/tags/v3.0.4
^9120b77b49adf388960232420cab8a763cfc54cd
839407b851d362e237f9160e3cb23cb7e07355f8 refs/tags/v3.0.5
^61cc6818369c5f1a83cefded53688375f5b28b0c
d3d91316a2cf570c549b82a416dc105ffe32998c refs/tags/v3.0.6
^86333dc57d86928c7f4617bd439724840d3ace2f
89528c654f222a4582ad318018d77b256c89b823 refs/tags/v4.0.0
^1554503985ae65ce317dcd2ac90fe21a3cf32771
d513e14debb022fcf248b7ae3f966e699532b52a refs/tags/v4.0.1
^24c3c4d43fa1444cd645f4e8c7ceb4f960dd21f7
4007bbd8ca9c7aef2942c8dcf847abb7ff593a00 refs/tags/v4.0.2
^8221adac2dfc42bfc72ad3692a112d4cd02116f2
5cba94c4ea297fd1f73fa2c897777ab77a5d7fea refs/tags/v4.1.0
^945247028a226e002895dc3ad3ef377c34334f00
405d66c6d36264c3073f6d6eb6716372fadf288b refs/tags/v4.1.1
^8f757e14470251cfa94a274f779051281f8bce8f
724fe3917042ebb6298032f483381a0c16942831 refs/tags/v4.2.0
^853c9c9c1ab58ecb9e5e8168b538c7f839f46c81
9bc74935bbf14f79026f9b407bc017f0a7ca66b8 refs/tags/v4.3.0
^d83e211d7d4c608d8ab069130ece897aa8237d4a
ede30ca43ded948983b1f0da077e912ff4c0a983 refs/tags/v4.3.1
^30b7728b53f0418df3b39302544a9bbbbeea7b05
86f60670ab21e656c1df46d6e32a09a9359a5d0a refs/tags/v4.3.2
^a46fde443d0dfee8f123284d7ca4199516d7bae2
e5ec78c48902833e1240f7b3f24cc00052a5af66 refs/tags/v4.4.0
^0d7fd0cf1d44cb0c1b3b89766c2473975a79d2e7
c0205c0730cd5b7624c60e4875df6f8ad5a995b9 refs/tags/v4.5.0
^a3f494f87f915324bc21430cef310e48a3a5fa26
7b45475fd94c8421ab698179e7589a1178bd385c refs/tags/v4.5.1
^9ee137b8be38326dbc33c1223cb1dd18670e4936
4d8b5cbb9a0457ebbb02cdf844044b6c51cca099 refs/tags/v5.0.0
^984b832e9454932916288069701b52284824a113
930045ee0ecabac05fe2f68907c7b1e7d61ff12f refs/tags/v5.1.0
^c912231df93fdbbd92cc6bd826bfd413674fff30
0e6beb59fa5504b3a4f3d9b34fa1b165be37ecd0 refs/tags/v5.2.0
^48ab8f3984bbb5cc337b53a0b2a9bbddfaa646c9
47382dcad7da765908cd5d4fda80b6576c86ceda refs/tags/v5.2.1
^70e4b8c2792af8c31986a4456f0b287d6709f31c
7d6d1ecbdad2b336fe703305f8582737d747fab4 refs/tags/v5.2.2
^5d5d6670b2e4fb321b77f6a6325375a0b66a582f
ae74df26b9e057e9933ff624b3b293764276b38e refs/tags/v5.3.0
^ca6ad398585d3cb6252a9510fa0500cde24d5d18
ceed3214c1123bd5d72a298e4bf2be4f8c8c26c8 refs/tags/v6.0.0
^660d336b8593a9d32b8b81643a24052d4149da68
a3f972b7319945534d96ab508e713f0efeacaf82 refs/tags/v6.1.0
^f42038628addf746e0ff04f294b18d2bb3432f61
8e8c15188e927387af619db055891c8ed54e7640 refs/tags/v6.1.1
^d37678f079c014d3cf88d5136435015cb18c9a3b
ecc8a90d17a558096bce7fac7fec2f59f6c12270 refs/tags/v6.1.2
^04f8fa3ac77e1b48e21ad7134bbe090a95994ca2
751b9f45bd812faea1f4008d1716245ffcbfd02d refs/tags/v6.2.0
^9a8c6d3c7ab179a1ce424f3782e89b343425648a
d6e320f481e0ae2bb45147102dff6f02946d2678 refs/tags/v6.2.1
^5175f3f2fc48b169aa0ced71b217466e02cc4804
e5b54c5d8624243d34386c44b0f04fb6fe70e5ab refs/tags/v6.3.0
^fcf95482f304fc8d87fdc9b9dc60e72ab3ddb9ae
ab5efc1f04232675d6488a8ae3c1c16e8ad7fa1c refs/tags/v6.4.0
^9fdbed5ad45d1b09b35c1ef5355ba726b60cb702
930a1952e3d1744b64c369ec3ec690a39d346cf3 refs/tags/v6.4.1
^909a522c2789a5c404ae0720e581837ec4476753
75251901064b58597f203e23f25aa3083f0e9bc0 refs/tags/v6.4.2
^3dfa03e36bd81e36fc2860245d26789dc6d63528
5c64b5e031e2ce9c533c57e816fe3ba9b5d57f73 refs/tags/v6.4.3
^58cfdc3fca3163d99922f3e09eb4f1e489720114
488af726cec687a0a876b5a5a42a96203fe37b88 refs/tags/v6.4.4
^8281d3af604842c83d5fad5a65962d4b145cf589
112eb7d5aad9caa2d951b8e1c580e81f6b14673f refs/tags/v6.4.5
^040ae00794c68103ba131211bfd9c7c5b7b18c9d
33ae2940223319fb8117814573e0d0bf70966538 refs/tags/v6.4.6
^d97243b0aa6969338136a6c99dca95473d60db0b
24436d12d9878f777dfc16aab54afcb1865e52f5 refs/tags/v6.4.7
^b81c08b1f9e8d6fe489e6c424a11343910bfdc1c
acf36b0ea63a40f5ad0fdee480ddd8cd0d755336 refs/tags/v6.5.0
^a46871eaedc3edcec849672b6c5b09ebb8b4f699
d517aaeda766f37a150ce5fec680edc332b0bf92 refs/tags/v6.5.1
^d7d056f372e86945b55faef7b851bb5dfec1b4c6
86155225003a39dfbc0224f53c0d9d1d12ff9d42 refs/tags/v6.5.2
^505b21a5a793570ca189f44d7478bccc8b15a8a1
149d10df6e43dd21dbce92772795967e1e10d32a refs/tags/v6.6.0
^28893ffd11cfeda001332114c34e4c4d2d7375a8
5db45425292a1da0e7d80c98fcfb4e76b4983bdb refs/tags/v6.6.1
^03cfcc16cd740e9740b696f42836e6b84c2f6809
21e6aa82be887f9961e3b43195e4c702e4283e6d refs/tags/v6.7.0
^a08da654c99470f1c9bf93b84c744d87a215c74c
623e961515cda738325ecc1914732fc2d3a15258 refs/tags/v6.7.1
^43bdea724337d7cf98f2953335792aeac6e670e4
1095e1cae0403b31124fa1316fdf31d72e6c6c4d refs/tags/v6.8.0
^66337c177f756f4228ac9b16e1868ebf54029abd
7bffc519ad18a6048e737331c38b147178a3a988 refs/tags/v6.9.0
^b3919d86c8a49cec7c5799c128e320a42630456c
783d8bfd49b99d5aa014f47e6f5d7e35df4f54d9 refs/tags/v6.9.1
^26dcec62f9677dceba57de8cd717ff91447781c7
| # pack-refs with: peeled fully-peeled sorted
3b31485091202b7118b15275cb33d200cb923bec refs/remotes/origin/1388-invalid-location-type-passed-to-loggingevent-constructor
2b889fe7764fb2e10d7bd1f0bfc28c1561e155a4 refs/remotes/origin/date-rolling-file-appender
3167185b8559a94f67c3d6262e4168f9fb0425b0 refs/remotes/origin/dependabot/npm_and_yarn/babel/traverse-7.23.2
312dbc446cc6b68929d4057202bdf7d9c4b90c88 refs/remotes/origin/dependabot/npm_and_yarn/react-devtools-core-4.28.4
449db3d24390c648cc0b58b935be8a1b60f13a06 refs/remotes/origin/error-handling
e337bcdb8fd78fe4bd556c2eb5fa72360196f8de refs/remotes/origin/feat/filters-to-allow-multi-appenders
6aacb0da0b93daaa60530a6983660bd1b4915a20 refs/remotes/origin/flush-on-exit
936ad4da8e8c51be12fd050b01e8b3721d6f576d refs/remotes/origin/isaacg-alwaysIncludePattern
a16100ba7258da80c874de3cac9c8556b950125b refs/remotes/origin/logger-decoupling
bd457888eb91b9e932fe8f66d720cf2d9d6442f4 refs/remotes/origin/master
9e8e1f76ad3243ad578123dedef116146bcfabcd refs/remotes/origin/node-0.8-backport
8b49ba9f3d84dd209323e2ad77c3d3898c3fd42e refs/remotes/origin/node-0.8-readable-stream
7d4fdce28f7f1fdb160198283102668fdf6f9dae refs/remotes/origin/refactoring
67b19aeaf301a1c3eb9dbfaf25f580948d85a284 refs/remotes/origin/release-0.5.7-fixup
6352632fb24c3935b73fc8a604cb9f165cd71daf refs/remotes/origin/release-0.6.0-fixup
1fb4f2f8723e6837dc3269970841184684238f74 refs/remotes/origin/streamroller-hotfix
800f0d6bf670bb5434530a3f3734887eb9b18227 refs/remotes/origin/unstable
d80368bf9c7c499a1c68a2c5470b1ee7009560e2 refs/tags/v0.5.0
^ad7e844d6867c49f996cc88a15e5988448db94fe
c7dbe783dddf3e2e25fc248816dbf3b27a194587 refs/tags/v0.5.1
^e3a20a17462fa95fd11306f801014fac3ef0da71
012b0d5ed76995229d09bb1ff274ed058ddeba5d refs/tags/v0.5.2
2bfad6362a07b4bc51f8f63c60fb8ddddd73ceaa refs/tags/v0.5.3
4739c65c684dc9b568f34df3cd656ac800bba1b2 refs/tags/v0.5.4
a9307fd6dababf8a4dbd7e8317c7fd64b57ce16e refs/tags/v0.5.5
8b42e46071a06e2402185af83a88b6c46ec8ae3c refs/tags/v0.5.6
50eefcc70196fc05b235bb516aeaea631362f354 refs/tags/v0.5.7
ecbf41bc8310d433cd898e4a9c9e9fca14f6b8dc refs/tags/v0.6.0
50839262fb823c71230a3adb376e4f3dccabcaa7 refs/tags/v0.6.10
^72bfb5d9809ff8fbbd6ea6ef525ff0618975280a
1aa9a003a48a476c7b9ac0bd79003d5266acede1 refs/tags/v0.6.11
^cd2ee14bde58c9a9a23f9f59fb1d2c1338f736aa
e8caf912cc879d1a18dd2861a0beebff68bff8c8 refs/tags/v0.6.12
^492919b940edd0508073c1e07223da37348e44c4
219cceac78920bb173920ac15621899b9324cf2e refs/tags/v0.6.13
^0c2baa969016fa9d90b605dc616b72347243273d
852856a6f4b9ed322fcaedea3c56c7f504d31c50 refs/tags/v0.6.14
^ca5272aacc714bb4e5a01e6f5ce66564ef076237
702ad21bf5e3ca025550f16a6c3472e815ada315 refs/tags/v0.6.15
^a703f2dc1284a39052f69a430789ea34cf2f5c01
40f38365d62e4cc3c1750b54ece677c05e919294 refs/tags/v0.6.16
^eaa77b045465f2eb8ae30b5a91232b4719315249
7d52ec2e7cc2e8622dd0c4ff43b5a2943f05742c refs/tags/v0.6.17
^fb9948145cb75378c71835d2efe305f505e4c339
3f521f7c9a72829ed65de50fc6041c13eb54808a refs/tags/v0.6.18
^ab778955551458b7c354607e630df8b0411e03d5
49006ce2210cc6815c24ed68d1279d3dd834d4ce refs/tags/v0.6.19
^ae04cc9a4a5af5897b1edee56e460dd86f540e82
86077966a59e2a96037ebfb52f505dcd2788911b refs/tags/v0.6.2
^36c5175a5530d2fb2b1094d3ea61f27839776f0b
84af2b656f46340afd572625d9613b22fc3905f3 refs/tags/v0.6.20
^fb072dd70d0b35e541754c14f6e3e0b2041575ac
0cf30afd72a8b2d3be68c8c8329ee9d6b7c50e9d refs/tags/v0.6.21
^176d44833e4c09bfd64dbcee11e083f436cb1dc5
4778dee3fd14fd2399e7b4af1d8719681761ab05 refs/tags/v0.6.22
^35067af5504ff10254ae0b086d856f7a39bb0c40
aeefc86a55786476451c34266735c03f03d2a7e3 refs/tags/v0.6.23
^1cdd37c4882e2c725c264294f516c672170e26bc
a1fec8df2796ddb227255df700f0be03757fca42 refs/tags/v0.6.24
^9fe32d06e30680b31a558fd96ed3bfe048da3051
e937543ee60fc11f783f5bc177c6533d9c63e0ff refs/tags/v0.6.25
^8dff114b4935a051e09115c89bec87aac095db15
6aca3d908437398a715fd1d2dc985b906c0aec70 refs/tags/v0.6.26
^fc4cdea50dcc95b5827e7e01390fe648a1206391
6eefe61f16a15d3a3393302cc4fddaabf3aa5bc9 refs/tags/v0.6.27
^70752c5c4f4230adc46d1086d5d32810a36e60b2
dbe71c92867e44643a5c51dab65deec1b3abeb8f refs/tags/v0.6.28
^37ccd24fd0bdd41e293e76efa90e3f5da133dc1e
2219452c97e32c3089e7be0be2ec9f43b60e2e42 refs/tags/v0.6.29
^033e52a0db9fc952ab8e5b0e56854086e9a1cdf2
c68286f3ba3526b41763b4dfd412e43ab13f7c6f refs/tags/v0.6.3
^7844b0d2e42d398af7579ac116ab03a74c4283f3
fec3d9e6e3aab5eea370d559a87efed3e4449c8a refs/tags/v0.6.30
^55c516740c67f7f370802166b45aadf6c5f359bc
a01113fd60c9e85494780ed9eabdc28bcc1854e6 refs/tags/v0.6.31
^0f8bffda210b784c885314b7a4d8fa3ab0968be2
534595c43f17ce030f308876dcb0fa25c19b42db refs/tags/v0.6.32
^b1a36cbe7603563b4a3c0eecfd75cc6558832c99
b5f7e2794368d6d24805d18cd41fc0cc4c0c46e7 refs/tags/v0.6.33
^111ba283e91c7fb8a2f3ded86560f3a52659a4b7
4c095a303e9b05f1949340651a63f6a0580350b2 refs/tags/v0.6.34
^451f79a29a1661910f17f5e0588ddd18491565cb
40fd6f3d18edb77a0a0c091290452e4bc906bdcf refs/tags/v0.6.35
^a1433e2316d7ea0db87fccfac0efa6fee6e6497e
9ac43bb954781bf6c07e90fd06d0ebb858325f6c refs/tags/v0.6.36
^5d9e08a28942ae05a58f82fefdf9aa14315cd6b2
0981fc1fdd5f08529f2751381f9dd04fdce1b525 refs/tags/v0.6.37
^4e0a71865eb1f589b6a1c4017f080222dfa32626
1f137a74bc66456e2e31cc81bb563037a92713c9 refs/tags/v0.6.38
^6c21e4acd90047525d34ac4f4740e8ee0dba3bc2
0943f3ced3baf6abffdcb56920dcec9953cc8c39 refs/tags/v0.6.4
^af6ae7af9883bdf90de55ac133ed41f1077cb691
2e9d19fa1ad804ae024dd98f439d02e471e677f2 refs/tags/v0.6.5
^fdc9d253c9195bd704940be79355d053d70112f5
c6d9519d9790ebdd9dbf3c11615781ac756bd635 refs/tags/v0.6.6
^8383dfc4f4e7785fb6d8f5b3f37be5ddd4a36f5b
7206c9d63dc89cc7a26c82c59443bcee3e8a90ec refs/tags/v0.6.7
^1e17f88ded8b72edeea008dd0566fccfb6456377
f72ab747e21fabf1423eeb1093cc0d589a19470d refs/tags/v0.6.8
^3018a49bde08939f71606a254597721cae0ebe5b
0e1f73c5e5f572801f14f63ff28ee252f89000ef refs/tags/v0.6.9
^d2f044a45107497f0774cf6aeb0f499be5f0d33c
2efa9f373d5e476b4f15b302aa9cc92fbcbcd26e refs/tags/v1.0.1
^e4f196a6823308168c917567b5e456a67c1ea699
af5d50d76ce5a4b7fe41f8fccbedfed17d323249 refs/tags/v1.1.0
^f7ec3ccf82dd9eda1fd617559f94373e42399c68
e4f8105ccc99539452ed4c467304501413dd24a0 refs/tags/v1.1.1
^1fb4f2f8723e6837dc3269970841184684238f74
e12bd00dd831d28513caadf504b413c8c1ac159f refs/tags/v2.0.0
^06cb8c680d12831b0c7d6524bd98220f9f329c04
63d3813b339bc9829701962379f10716d019629c refs/tags/v2.0.1
^97f45edf0d143312cdec73e5476e33166618ddff
37b71663a00ada3ab70790f4d16e227f51580f10 refs/tags/v2.1.0
^558d94db9b880908430fd4712a8d4809eb5dfb4d
3bb6d1516dab9a9684bf1ec213562e3f6e86860a refs/tags/v2.10.0
^72c453a741b0eef63f00d74fec962ec0a1bc8489
91bcd410b52e5fb753fbedbb77553c161fb5f311 refs/tags/v2.11.0
^8ad2c39b7887af765855cd561e1ec053e60e66a4
72d19f9797cc6ce739e45e3e63313c262a11a681 refs/tags/v2.2.0
^c2f1d7bf5d7328453cc74d92858470afe5adc07b
dd313c78ec94690b4d52cb7a39c7672f23758219 refs/tags/v2.3.0
^c8f6bff9f3925d520d6a4517dc4914d40b1417a3
8a6ba2449895729ff67df2e50108b61f2f67a575 refs/tags/v2.3.1
^9b0dff30336534ecab2e15e059bf405ba95eb8a2
444a8914ff965d699b6e7e0bab82d2af33ba942a refs/tags/v2.3.10
^9847c8ae8108f4e0debf2260f698b20b599c337b
07a5bab61a29707078a5cf2ac7185fd214f5006d refs/tags/v2.3.11
^7bf6af872422018d12fa8fcbaf73bab066cd071b
377270464a4491a76d1f25be7a9c902e56c23d2f refs/tags/v2.3.12
^b65871a4abded4456b9e243725099f35acc6070b
973004d78857139409f95682eaada20cc694a98e refs/tags/v2.3.2
^d9fcd3f165f21525114fcdefa49fc7261ab36573
2072a873ea829c3f1008579d75704490fa53ed3e refs/tags/v2.3.3
^9c19fd5e8e37341f0a67ba0c726bf3996e67d2ee
a86661756f6e96230458edd7ba150c7bb87e1d9b refs/tags/v2.3.4
^31e4e01e6b007386473b14f17dde562c20e0050a
27ee6afb58e0f1a075102e91ed5ab2e3e0f4dc8c refs/tags/v2.3.5
^f7637dfbf9b0494a7310f1e477e71ed6f07d88d1
6f90cef2f2cc6bc539014af3d85f78a7df25fbce refs/tags/v2.3.6
^15c35ef281b1668dd79459a8d8a759b1bfdc2dd7
31eea60f922771ecb0cb52cec1d191a62b52e53e refs/tags/v2.3.7
^edd07f23fb55a1510667600079831d691e6e0664
bb76f63df167addf8b56de1fa9c2666b9726d0c9 refs/tags/v2.3.8
^b5101160afb97c94bc1c7f6f8de9b37f02dee852
eb30515da2dcca800f719ba62b0de8672af45a2d refs/tags/v2.3.9
^c1c58e3bf39cb7d0953e8631b69f56a7307b6a2d
39271a33bc3d7a4eaccdb5e86c4c045ccc3b89cf refs/tags/v2.4.0
^d5687b357febfe2f2b4d245f4c29ba2d92f314f8
b2a5577d6e2511ebb84e169a140058ef26c19ae4 refs/tags/v2.4.1
^10dac6d6fd3612da730f07bd2bcf7b0f28ba3541
75add43091bfba93d017022871f51c33749574d1 refs/tags/v2.5.0
^b3db83eefa7ea81e8ce599f19752e3626933ee25
55aaa9d201ac698172f1ad334cec03f256564141 refs/tags/v2.5.1
^0078badaddfe8696201bb489c79a18a7ce492aae
b55f1b189332b2074ea68c892e6aceec75cb76c4 refs/tags/v2.5.2
^822ca2e385e7ecdd0c0fa68bb1c9fa58936cf5df
9516bf71e270783dd4a38e5581fdefdb8f448e6d refs/tags/v2.5.3
^d0b630aeeffc2fa0cc88e26b03a0a9cbf0658ba5
5e27721515ddbc7d59ec1bbaadcb4f8658555c20 refs/tags/v2.6.0
^6598898c4c6397698811cd3dd3b45c6b88d5c615
90a558c3844b93988b22427d6727f137f65d4a45 refs/tags/v2.6.1
^26d18314c1908d006c1271aaa8de380c6f21b554
6ec79a9bf0dab61ef515eb82a1ae5381144843b3 refs/tags/v2.7.0
^b2f420cca945667a3b1384e4dee2a6954e293c52
b3d62d6030f02236c7167518d259019315e2a2c3 refs/tags/v2.8.0
^64ef14a7486c018fe0237470c65317639b8b23fd
97e802e703da271c255052bcc8c083108cbd2d0c refs/tags/v2.9.0
^0a7c0f76449861e809ce2410d0e247151378de8f
b7f7c502987eafe39a646b89d5b1598ed5899e88 refs/tags/v3.0.0
^45eca6943b039a3ff8c84ef9fe926b59567f2df2
6e9472f5a5f707468623089d0652623cf36c37a4 refs/tags/v3.0.1
^c7e9b06edf1bf7e53064c1f7062d598c40d35a8d
bc4e169d983ef7cd34b3a6215d201de79fcdfc56 refs/tags/v3.0.2
^36494a6f32c31f8b84c28ee45a455c3a2e2e2b2f
f7b788398d0e7d632dec7fb844f0e54eaac0b496 refs/tags/v3.0.3
^07e3b0bbe0af0b8454e7ebf47804b3ede1935de4
b2d00a0aa962fb0ea2f895070660050e24d48a78 refs/tags/v3.0.4
^9120b77b49adf388960232420cab8a763cfc54cd
839407b851d362e237f9160e3cb23cb7e07355f8 refs/tags/v3.0.5
^61cc6818369c5f1a83cefded53688375f5b28b0c
d3d91316a2cf570c549b82a416dc105ffe32998c refs/tags/v3.0.6
^86333dc57d86928c7f4617bd439724840d3ace2f
89528c654f222a4582ad318018d77b256c89b823 refs/tags/v4.0.0
^1554503985ae65ce317dcd2ac90fe21a3cf32771
d513e14debb022fcf248b7ae3f966e699532b52a refs/tags/v4.0.1
^24c3c4d43fa1444cd645f4e8c7ceb4f960dd21f7
4007bbd8ca9c7aef2942c8dcf847abb7ff593a00 refs/tags/v4.0.2
^8221adac2dfc42bfc72ad3692a112d4cd02116f2
5cba94c4ea297fd1f73fa2c897777ab77a5d7fea refs/tags/v4.1.0
^945247028a226e002895dc3ad3ef377c34334f00
405d66c6d36264c3073f6d6eb6716372fadf288b refs/tags/v4.1.1
^8f757e14470251cfa94a274f779051281f8bce8f
724fe3917042ebb6298032f483381a0c16942831 refs/tags/v4.2.0
^853c9c9c1ab58ecb9e5e8168b538c7f839f46c81
9bc74935bbf14f79026f9b407bc017f0a7ca66b8 refs/tags/v4.3.0
^d83e211d7d4c608d8ab069130ece897aa8237d4a
ede30ca43ded948983b1f0da077e912ff4c0a983 refs/tags/v4.3.1
^30b7728b53f0418df3b39302544a9bbbbeea7b05
86f60670ab21e656c1df46d6e32a09a9359a5d0a refs/tags/v4.3.2
^a46fde443d0dfee8f123284d7ca4199516d7bae2
e5ec78c48902833e1240f7b3f24cc00052a5af66 refs/tags/v4.4.0
^0d7fd0cf1d44cb0c1b3b89766c2473975a79d2e7
c0205c0730cd5b7624c60e4875df6f8ad5a995b9 refs/tags/v4.5.0
^a3f494f87f915324bc21430cef310e48a3a5fa26
7b45475fd94c8421ab698179e7589a1178bd385c refs/tags/v4.5.1
^9ee137b8be38326dbc33c1223cb1dd18670e4936
4d8b5cbb9a0457ebbb02cdf844044b6c51cca099 refs/tags/v5.0.0
^984b832e9454932916288069701b52284824a113
930045ee0ecabac05fe2f68907c7b1e7d61ff12f refs/tags/v5.1.0
^c912231df93fdbbd92cc6bd826bfd413674fff30
0e6beb59fa5504b3a4f3d9b34fa1b165be37ecd0 refs/tags/v5.2.0
^48ab8f3984bbb5cc337b53a0b2a9bbddfaa646c9
47382dcad7da765908cd5d4fda80b6576c86ceda refs/tags/v5.2.1
^70e4b8c2792af8c31986a4456f0b287d6709f31c
7d6d1ecbdad2b336fe703305f8582737d747fab4 refs/tags/v5.2.2
^5d5d6670b2e4fb321b77f6a6325375a0b66a582f
ae74df26b9e057e9933ff624b3b293764276b38e refs/tags/v5.3.0
^ca6ad398585d3cb6252a9510fa0500cde24d5d18
ceed3214c1123bd5d72a298e4bf2be4f8c8c26c8 refs/tags/v6.0.0
^660d336b8593a9d32b8b81643a24052d4149da68
a3f972b7319945534d96ab508e713f0efeacaf82 refs/tags/v6.1.0
^f42038628addf746e0ff04f294b18d2bb3432f61
8e8c15188e927387af619db055891c8ed54e7640 refs/tags/v6.1.1
^d37678f079c014d3cf88d5136435015cb18c9a3b
ecc8a90d17a558096bce7fac7fec2f59f6c12270 refs/tags/v6.1.2
^04f8fa3ac77e1b48e21ad7134bbe090a95994ca2
751b9f45bd812faea1f4008d1716245ffcbfd02d refs/tags/v6.2.0
^9a8c6d3c7ab179a1ce424f3782e89b343425648a
d6e320f481e0ae2bb45147102dff6f02946d2678 refs/tags/v6.2.1
^5175f3f2fc48b169aa0ced71b217466e02cc4804
e5b54c5d8624243d34386c44b0f04fb6fe70e5ab refs/tags/v6.3.0
^fcf95482f304fc8d87fdc9b9dc60e72ab3ddb9ae
ab5efc1f04232675d6488a8ae3c1c16e8ad7fa1c refs/tags/v6.4.0
^9fdbed5ad45d1b09b35c1ef5355ba726b60cb702
930a1952e3d1744b64c369ec3ec690a39d346cf3 refs/tags/v6.4.1
^909a522c2789a5c404ae0720e581837ec4476753
75251901064b58597f203e23f25aa3083f0e9bc0 refs/tags/v6.4.2
^3dfa03e36bd81e36fc2860245d26789dc6d63528
5c64b5e031e2ce9c533c57e816fe3ba9b5d57f73 refs/tags/v6.4.3
^58cfdc3fca3163d99922f3e09eb4f1e489720114
488af726cec687a0a876b5a5a42a96203fe37b88 refs/tags/v6.4.4
^8281d3af604842c83d5fad5a65962d4b145cf589
112eb7d5aad9caa2d951b8e1c580e81f6b14673f refs/tags/v6.4.5
^040ae00794c68103ba131211bfd9c7c5b7b18c9d
33ae2940223319fb8117814573e0d0bf70966538 refs/tags/v6.4.6
^d97243b0aa6969338136a6c99dca95473d60db0b
24436d12d9878f777dfc16aab54afcb1865e52f5 refs/tags/v6.4.7
^b81c08b1f9e8d6fe489e6c424a11343910bfdc1c
acf36b0ea63a40f5ad0fdee480ddd8cd0d755336 refs/tags/v6.5.0
^a46871eaedc3edcec849672b6c5b09ebb8b4f699
d517aaeda766f37a150ce5fec680edc332b0bf92 refs/tags/v6.5.1
^d7d056f372e86945b55faef7b851bb5dfec1b4c6
86155225003a39dfbc0224f53c0d9d1d12ff9d42 refs/tags/v6.5.2
^505b21a5a793570ca189f44d7478bccc8b15a8a1
149d10df6e43dd21dbce92772795967e1e10d32a refs/tags/v6.6.0
^28893ffd11cfeda001332114c34e4c4d2d7375a8
5db45425292a1da0e7d80c98fcfb4e76b4983bdb refs/tags/v6.6.1
^03cfcc16cd740e9740b696f42836e6b84c2f6809
21e6aa82be887f9961e3b43195e4c702e4283e6d refs/tags/v6.7.0
^a08da654c99470f1c9bf93b84c744d87a215c74c
623e961515cda738325ecc1914732fc2d3a15258 refs/tags/v6.7.1
^43bdea724337d7cf98f2953335792aeac6e670e4
1095e1cae0403b31124fa1316fdf31d72e6c6c4d refs/tags/v6.8.0
^66337c177f756f4228ac9b16e1868ebf54029abd
7bffc519ad18a6048e737331c38b147178a3a988 refs/tags/v6.9.0
^b3919d86c8a49cec7c5799c128e320a42630456c
783d8bfd49b99d5aa014f47e6f5d7e35df4f54d9 refs/tags/v6.9.1
^26dcec62f9677dceba57de8cd717ff91447781c7
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/faq.md | # Frequently Asked Questions
## I want errors to go to a special file, but still want everything written to another file - how do I do that?
You'll need to use the [logLevelFilter](logLevelFilter.md). Here's an example configuration:
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
emergencies: { type: 'file', filename: 'oh-no-not-again.log' },
'just-errors': { type: 'logLevelFilter', appender: 'emergencies', level: 'error' }
},
categories: {
default: { appenders: ['just-errors', 'everything'], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('This goes to all-the-logs.log');
logger.info('As does this.');
logger.error('This goes to all-the-logs.log and oh-no-not-again.log');
```
## I want to reload the configuration when I change my config file - how do I do that?
Previous versions of log4js used to watch for changes in the configuration file and reload when it changed. It didn't always work well, sometimes leaving file handles or sockets open. This feature was removed in version 2.x. As a replacement, I'd suggest using a library like [watchr](https://www.npmjs.com/package/watchr) to notify you of file changes. Then you can call `log4js.shutdown` followed by `log4js.configure` again.
## What happened to `replaceConsole` - it doesn't work any more?
I removed `replaceConsole` - it caused a few weird errors, and I wasn't entirely comfortable with messing around with a core part of node. If you still want to do this, then code like this should do the trick:
```javascript
log4js.configure(...); // set up your categories and appenders
const logger = log4js.getLogger('console');
console.log = logger.info.bind(logger); // do the same for others - console.debug, etc.
```
## I'm using pm2/passenger/some other third thing and I'm not getting any logs!
Take a look at the [clustering](clustering.md) docs, they should help you out.
## NPM complains about nodemailer being deprecated, what should I do?
Nodemailer version 4.0.1 (the not-deprecated version) requires a node version >= 6, but log4js supports node versions >= 4. So until I stop supporting node versions less than 6 I can't update the dependency. It's only an optional dependency anyway, so you're free to install [email protected] if you want - as far as I know it should work, the API looks the same to me. If you know that the smtp appender definitely doesn't work with nodemailer v4, then please create an issue with some details about the problem.
## I want line numbers in my logs!
You need to enable call stack for the category, and use pattern layout to output the values. e.g.
```javascript
const log4js = require('log4js');
log4js.configure({
appenders: {
out: {
type: 'stdout',
layout: {
type: 'pattern', pattern: '%d %p %c %f:%l %m%n'
}
}
},
categories: {
default: { appenders: ['out'], level: 'info', enableCallStack: true }
}
});
const logger = log4js.getLogger('thing');
logger.info('this should give me a line number now');
```
Would output something like this:
```bash
2019-05-22T08:41:07.312 INFO thing index.js:16 this should give me a line number now
```
| # Frequently Asked Questions
## I want errors to go to a special file, but still want everything written to another file - how do I do that?
You'll need to use the [logLevelFilter](logLevelFilter.md). Here's an example configuration:
```javascript
log4js.configure({
appenders: {
everything: { type: 'file', filename: 'all-the-logs.log' },
emergencies: { type: 'file', filename: 'oh-no-not-again.log' },
'just-errors': { type: 'logLevelFilter', appender: 'emergencies', level: 'error' }
},
categories: {
default: { appenders: ['just-errors', 'everything'], level: 'debug' }
}
});
const logger = log4js.getLogger();
logger.debug('This goes to all-the-logs.log');
logger.info('As does this.');
logger.error('This goes to all-the-logs.log and oh-no-not-again.log');
```
## I want to reload the configuration when I change my config file - how do I do that?
Previous versions of log4js used to watch for changes in the configuration file and reload when it changed. It didn't always work well, sometimes leaving file handles or sockets open. This feature was removed in version 2.x. As a replacement, I'd suggest using a library like [watchr](https://www.npmjs.com/package/watchr) to notify you of file changes. Then you can call `log4js.shutdown` followed by `log4js.configure` again.
## What happened to `replaceConsole` - it doesn't work any more?
I removed `replaceConsole` - it caused a few weird errors, and I wasn't entirely comfortable with messing around with a core part of node. If you still want to do this, then code like this should do the trick:
```javascript
log4js.configure(...); // set up your categories and appenders
const logger = log4js.getLogger('console');
console.log = logger.info.bind(logger); // do the same for others - console.debug, etc.
```
## I'm using pm2/passenger/some other third thing and I'm not getting any logs!
Take a look at the [clustering](clustering.md) docs, they should help you out.
## NPM complains about nodemailer being deprecated, what should I do?
Nodemailer version 4.0.1 (the not-deprecated version) requires a node version >= 6, but log4js supports node versions >= 4. So until I stop supporting node versions less than 6 I can't update the dependency. It's only an optional dependency anyway, so you're free to install [email protected] if you want - as far as I know it should work, the API looks the same to me. If you know that the smtp appender definitely doesn't work with nodemailer v4, then please create an issue with some details about the problem.
## I want line numbers in my logs!
You need to enable call stack for the category, and use pattern layout to output the values. e.g.
```javascript
const log4js = require('log4js');
log4js.configure({
appenders: {
out: {
type: 'stdout',
layout: {
type: 'pattern', pattern: '%d %p %c %f:%l %m%n'
}
}
},
categories: {
default: { appenders: ['out'], level: 'info', enableCallStack: true }
}
});
const logger = log4js.getLogger('thing');
logger.info('this should give me a line number now');
```
Would output something like this:
```bash
2019-05-22T08:41:07.312 INFO thing index.js:16 this should give me a line number now
```
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./examples/pm2.js | const log4js = require('../lib/log4js');
// NOTE: for PM2 support to work you'll need to install the pm2-intercom module
// `pm2 install pm2-intercom`
log4js.configure({
appenders: {
out: { type: 'file', filename: 'pm2logs.log' }
},
categories: {
default: { appenders: ['out'], level: 'info' }
},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
const logger = log4js.getLogger('app');
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info('last bubbles', process.env.INSTANCE_ID);
// give pm2 time to set everything up, before we tear it down
setTimeout(() => {
log4js.shutdown(() => {
console.error('All done, shutdown cb returned.');
});
}, 5000);
| const log4js = require('../lib/log4js');
// NOTE: for PM2 support to work you'll need to install the pm2-intercom module
// `pm2 install pm2-intercom`
log4js.configure({
appenders: {
out: { type: 'file', filename: 'pm2logs.log' }
},
categories: {
default: { appenders: ['out'], level: 'info' }
},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID'
});
const logger = log4js.getLogger('app');
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info('last bubbles', process.env.INSTANCE_ID);
// give pm2 time to set everything up, before we tear it down
setTimeout(() => {
log4js.shutdown(() => {
console.error('All done, shutdown cb returned.');
});
}, 5000);
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./test/tap/test-config.json | {
"appenders": [
{ "type": "stdout" }
]
}
| {
"appenders": [
{ "type": "stdout" }
]
}
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./.git/hooks/pre-receive.sample | #!/bin/sh
#
# An example hook script to make use of push options.
# The example simply echoes all push options that start with 'echoback='
# and rejects all pushes when the "reject" push option is used.
#
# To enable this hook, rename this file to "pre-receive".
if test -n "$GIT_PUSH_OPTION_COUNT"
then
i=0
while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
do
eval "value=\$GIT_PUSH_OPTION_$i"
case "$value" in
echoback=*)
echo "echo from the pre-receive-hook: ${value#*=}" >&2
;;
reject)
exit 1
esac
i=$((i + 1))
done
fi
| #!/bin/sh
#
# An example hook script to make use of push options.
# The example simply echoes all push options that start with 'echoback='
# and rejects all pushes when the "reject" push option is used.
#
# To enable this hook, rename this file to "pre-receive".
if test -n "$GIT_PUSH_OPTION_COUNT"
then
i=0
while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
do
eval "value=\$GIT_PUSH_OPTION_$i"
case "$value" in
echoback=*)
echo "echo from the pre-receive-hook: ${value#*=}" >&2
;;
reject)
exit 1
esac
i=$((i + 1))
done
fi
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./test/tap/fileSyncAppender-test.js | const { test } = require("tap");
const fs = require("fs");
const path = require("path");
const EOL = require("os").EOL || "\n";
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
// doesn't really matter if it failed
}
}
test("log4js fileSyncAppender", batch => {
batch.test("with default fileSyncAppender settings", t => {
const testFile = path.join(__dirname, "/fa-default-sync-test.log");
const logger = log4js.getLogger("default-settings");
remove(testFile);
t.teardown(() => {
remove(testFile);
});
log4js.configure({
appenders: { sync: { type: "fileSync", filename: testFile } },
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This should be in the file.");
fs.readFile(testFile, "utf8", (err, fileContents) => {
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
batch.test("with existing file", t => {
const testFile = path.join(__dirname, "/fa-existing-file-sync-test.log");
const logger = log4js.getLogger("default-settings");
remove(testFile);
t.teardown(() => {
remove(testFile);
});
log4js.configure({
appenders: { sync: { type: "fileSync", filename: testFile } },
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This should be in the file.");
log4js.shutdown(() => {
log4js.configure({
appenders: { sync: { type: "fileSync", filename: testFile } },
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This should also be in the file.");
fs.readFile(testFile, "utf8", (err, fileContents) => {
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(fileContents, `This should also be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test("should give error if invalid filename", async t => {
const file = "";
const expectedError = new Error(`Invalid filename: ${file}`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "fileSync",
filename: file
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("should give error if invalid maxLogSize", async t => {
const maxLogSize = -1;
const expectedError = new Error(`maxLogSize (${maxLogSize}) should be > 0`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "fileSync",
filename: path.join(__dirname, "fa-invalidMaxFileSize-sync-test.log"),
maxLogSize: -1
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("with a max file size and no backups", t => {
const testFile = path.join(__dirname, "/fa-maxFileSize-sync-test.log");
const logger = log4js.getLogger("max-file-size");
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
maxLogSize: 100,
backups: 0
}
},
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This is the first log message.");
logger.info("This is an intermediate log message.");
logger.info("This is the second log message.");
t.test("log file should only contain the second message", assert => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
assert.match(fileContents, `This is the second log message.${EOL}`);
assert.equal(
fileContents.indexOf("This is the first log message."),
-1
);
assert.end();
});
});
t.test("there should be one test files", assert => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-sync-test.log")
);
assert.equal(logFiles.length, 1);
assert.end();
});
});
t.end();
});
batch.test("with a max file size in unit mode and no backups", t => {
const testFile = path.join(__dirname, "/fa-maxFileSize-unit-sync-test.log");
const logger = log4js.getLogger("max-file-size-unit");
remove(testFile);
remove(`${testFile}.1`);
t.teardown(() => {
remove(testFile);
remove(`${testFile}.1`);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
maxLogSize: "1K",
backups: 0,
layout: { type: "messagePassThrough" }
}
},
categories: { default: { appenders: ["sync"], level: "debug" } }
});
const maxLine = 22; // 1024 max file size / 47 bytes per line
for (let i = 0; i < maxLine; i++) {
logger.info("These are the log messages for the first file."); // 46 bytes per line + '\n'
}
logger.info("This is the second log message.");
t.test("log file should only contain the second message", assert => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
assert.match(fileContents, `This is the second log message.${EOL}`);
assert.notMatch(fileContents, "These are the log messages for the first file.");
assert.end();
});
});
t.test("there should be one test file", assert => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-unit-sync-test.log")
);
assert.equal(logFiles.length, 1);
assert.end();
});
});
t.end();
});
batch.test("with a max file size and 2 backups", t => {
const testFile = path.join(
__dirname,
"/fa-maxFileSize-with-backups-sync-test.log"
);
const logger = log4js.getLogger("max-file-size-backups");
remove(testFile);
remove(`${testFile}.1`);
remove(`${testFile}.2`);
t.teardown(() => {
remove(testFile);
remove(`${testFile}.1`);
remove(`${testFile}.2`);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
maxLogSize: 50,
backups: 2
}
},
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This is the first log message.");
logger.info("This is the second log message.");
logger.info("This is the third log message.");
logger.info("This is the fourth log message.");
t.test("the log files", assert => {
assert.plan(5);
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-with-backups-sync-test.log")
);
assert.equal(logFiles.length, 3, "should be 3 files");
assert.same(
logFiles,
[
"fa-maxFileSize-with-backups-sync-test.log",
"fa-maxFileSize-with-backups-sync-test.log.1",
"fa-maxFileSize-with-backups-sync-test.log.2"
],
"should be named in sequence"
);
fs.readFile(
path.join(__dirname, logFiles[0]),
"utf8",
(e, contents) => {
assert.match(contents, "This is the fourth log message.");
}
);
fs.readFile(
path.join(__dirname, logFiles[1]),
"utf8",
(e, contents) => {
assert.match(contents, "This is the third log message.");
}
);
fs.readFile(
path.join(__dirname, logFiles[2]),
"utf8",
(e, contents) => {
assert.match(contents, "This is the second log message.");
}
);
});
});
t.end();
});
batch.test("configure with fileSyncAppender", t => {
const testFile = "tmp-sync-tests.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// this config defines one file appender (to ./tmp-sync-tests.log)
// and sets the log level for "tests" to WARN
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" },
tests: { appenders: ["sync"], level: "warn" }
}
});
const logger = log4js.getLogger("tests");
logger.info("this should not be written to the file");
logger.warn("this should be written to the file");
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(contents.indexOf("this should not be written to the file"), -1);
t.end();
});
});
batch.test("configure with non-existent multi-directory (recursive, nodejs >= 10.12.0)", t => {
const testFile = "tmpA/tmpB/tmpC/tmp-sync-tests-recursive.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync("tmpA/tmpB/tmpC");
fs.rmdirSync("tmpA/tmpB");
fs.rmdirSync("tmpA");
} catch (e) {
// doesn't matter
}
});
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
});
const logger = log4js.getLogger();
logger.info("this should be written to the file");
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.end();
});
});
batch.test("configure with non-existent multi-directory (non-recursive, nodejs < 10.12.0)", t => {
const testFile = "tmpA/tmpB/tmpC/tmp-sync-tests-non-recursive.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync("tmpA/tmpB/tmpC");
fs.rmdirSync("tmpA/tmpB");
fs.rmdirSync("tmpA");
} catch (e) {
// doesn't matter
}
});
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
fs: {
...fs,
mkdirSync(dirPath, options) {
return fs.mkdirSync(dirPath, { ...options, ...{ recursive: false } });
}
}
}
});
sandboxedLog4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
});
const logger = sandboxedLog4js.getLogger();
logger.info("this should be written to the file");
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.end();
});
});
batch.test("configure with non-existent multi-directory (error handling)", t => {
const testFile = "tmpA/tmpB/tmpC/tmp-sync-tests-error-handling.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync("tmpA/tmpB/tmpC");
fs.rmdirSync("tmpA/tmpB");
fs.rmdirSync("tmpA");
} catch (e) {
// doesn't matter
}
});
const errorEPERM = new Error("EPERM");
errorEPERM.code = "EPERM";
let sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEPERM ;
}
}
}
});
t.throws(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
}),
errorEPERM
);
const errorEROFS = new Error("EROFS");
errorEROFS.code = "EROFS";
sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEROFS;
},
statSync() {
return { isDirectory() { return false; } };
}
}
}
});
t.throws(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
}),
errorEROFS
);
fs.mkdirSync("tmpA/tmpB/tmpC", { recursive: true });
sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEROFS;
}
}
}
});
t.doesNotThrow(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
})
);
t.end();
});
batch.test("test options", t => {
const testFile = "tmp-options-tests.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// using non-standard options
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" },
flags: "w",
encoding: "ascii",
mode: 0o666
}
},
categories: {
default: { appenders: ["sync"], level: "info" }
}
});
const logger = log4js.getLogger();
logger.warn("log message");
fs.readFile(testFile, "ascii", (err, contents) => {
t.match(contents, `log message${EOL}`);
t.end();
});
});
batch.end();
});
| const { test } = require("tap");
const fs = require("fs");
const path = require("path");
const EOL = require("os").EOL || "\n";
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
// doesn't really matter if it failed
}
}
test("log4js fileSyncAppender", batch => {
batch.test("with default fileSyncAppender settings", t => {
const testFile = path.join(__dirname, "/fa-default-sync-test.log");
const logger = log4js.getLogger("default-settings");
remove(testFile);
t.teardown(() => {
remove(testFile);
});
log4js.configure({
appenders: { sync: { type: "fileSync", filename: testFile } },
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This should be in the file.");
fs.readFile(testFile, "utf8", (err, fileContents) => {
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
batch.test("with existing file", t => {
const testFile = path.join(__dirname, "/fa-existing-file-sync-test.log");
const logger = log4js.getLogger("default-settings");
remove(testFile);
t.teardown(() => {
remove(testFile);
});
log4js.configure({
appenders: { sync: { type: "fileSync", filename: testFile } },
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This should be in the file.");
log4js.shutdown(() => {
log4js.configure({
appenders: { sync: { type: "fileSync", filename: testFile } },
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This should also be in the file.");
fs.readFile(testFile, "utf8", (err, fileContents) => {
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(fileContents, `This should also be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test("should give error if invalid filename", async t => {
const file = "";
const expectedError = new Error(`Invalid filename: ${file}`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "fileSync",
filename: file
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("should give error if invalid maxLogSize", async t => {
const maxLogSize = -1;
const expectedError = new Error(`maxLogSize (${maxLogSize}) should be > 0`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "fileSync",
filename: path.join(__dirname, "fa-invalidMaxFileSize-sync-test.log"),
maxLogSize: -1
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("with a max file size and no backups", t => {
const testFile = path.join(__dirname, "/fa-maxFileSize-sync-test.log");
const logger = log4js.getLogger("max-file-size");
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
maxLogSize: 100,
backups: 0
}
},
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This is the first log message.");
logger.info("This is an intermediate log message.");
logger.info("This is the second log message.");
t.test("log file should only contain the second message", assert => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
assert.match(fileContents, `This is the second log message.${EOL}`);
assert.equal(
fileContents.indexOf("This is the first log message."),
-1
);
assert.end();
});
});
t.test("there should be one test files", assert => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-sync-test.log")
);
assert.equal(logFiles.length, 1);
assert.end();
});
});
t.end();
});
batch.test("with a max file size in unit mode and no backups", t => {
const testFile = path.join(__dirname, "/fa-maxFileSize-unit-sync-test.log");
const logger = log4js.getLogger("max-file-size-unit");
remove(testFile);
remove(`${testFile}.1`);
t.teardown(() => {
remove(testFile);
remove(`${testFile}.1`);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
maxLogSize: "1K",
backups: 0,
layout: { type: "messagePassThrough" }
}
},
categories: { default: { appenders: ["sync"], level: "debug" } }
});
const maxLine = 22; // 1024 max file size / 47 bytes per line
for (let i = 0; i < maxLine; i++) {
logger.info("These are the log messages for the first file."); // 46 bytes per line + '\n'
}
logger.info("This is the second log message.");
t.test("log file should only contain the second message", assert => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
assert.match(fileContents, `This is the second log message.${EOL}`);
assert.notMatch(fileContents, "These are the log messages for the first file.");
assert.end();
});
});
t.test("there should be one test file", assert => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-unit-sync-test.log")
);
assert.equal(logFiles.length, 1);
assert.end();
});
});
t.end();
});
batch.test("with a max file size and 2 backups", t => {
const testFile = path.join(
__dirname,
"/fa-maxFileSize-with-backups-sync-test.log"
);
const logger = log4js.getLogger("max-file-size-backups");
remove(testFile);
remove(`${testFile}.1`);
remove(`${testFile}.2`);
t.teardown(() => {
remove(testFile);
remove(`${testFile}.1`);
remove(`${testFile}.2`);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
maxLogSize: 50,
backups: 2
}
},
categories: { default: { appenders: ["sync"], level: "debug" } }
});
logger.info("This is the first log message.");
logger.info("This is the second log message.");
logger.info("This is the third log message.");
logger.info("This is the fourth log message.");
t.test("the log files", assert => {
assert.plan(5);
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-with-backups-sync-test.log")
);
assert.equal(logFiles.length, 3, "should be 3 files");
assert.same(
logFiles,
[
"fa-maxFileSize-with-backups-sync-test.log",
"fa-maxFileSize-with-backups-sync-test.log.1",
"fa-maxFileSize-with-backups-sync-test.log.2"
],
"should be named in sequence"
);
fs.readFile(
path.join(__dirname, logFiles[0]),
"utf8",
(e, contents) => {
assert.match(contents, "This is the fourth log message.");
}
);
fs.readFile(
path.join(__dirname, logFiles[1]),
"utf8",
(e, contents) => {
assert.match(contents, "This is the third log message.");
}
);
fs.readFile(
path.join(__dirname, logFiles[2]),
"utf8",
(e, contents) => {
assert.match(contents, "This is the second log message.");
}
);
});
});
t.end();
});
batch.test("configure with fileSyncAppender", t => {
const testFile = "tmp-sync-tests.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// this config defines one file appender (to ./tmp-sync-tests.log)
// and sets the log level for "tests" to WARN
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" },
tests: { appenders: ["sync"], level: "warn" }
}
});
const logger = log4js.getLogger("tests");
logger.info("this should not be written to the file");
logger.warn("this should be written to the file");
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(contents.indexOf("this should not be written to the file"), -1);
t.end();
});
});
batch.test("configure with non-existent multi-directory (recursive, nodejs >= 10.12.0)", t => {
const testFile = "tmpA/tmpB/tmpC/tmp-sync-tests-recursive.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync("tmpA/tmpB/tmpC");
fs.rmdirSync("tmpA/tmpB");
fs.rmdirSync("tmpA");
} catch (e) {
// doesn't matter
}
});
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
});
const logger = log4js.getLogger();
logger.info("this should be written to the file");
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.end();
});
});
batch.test("configure with non-existent multi-directory (non-recursive, nodejs < 10.12.0)", t => {
const testFile = "tmpA/tmpB/tmpC/tmp-sync-tests-non-recursive.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync("tmpA/tmpB/tmpC");
fs.rmdirSync("tmpA/tmpB");
fs.rmdirSync("tmpA");
} catch (e) {
// doesn't matter
}
});
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
fs: {
...fs,
mkdirSync(dirPath, options) {
return fs.mkdirSync(dirPath, { ...options, ...{ recursive: false } });
}
}
}
});
sandboxedLog4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
});
const logger = sandboxedLog4js.getLogger();
logger.info("this should be written to the file");
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.end();
});
});
batch.test("configure with non-existent multi-directory (error handling)", t => {
const testFile = "tmpA/tmpB/tmpC/tmp-sync-tests-error-handling.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync("tmpA/tmpB/tmpC");
fs.rmdirSync("tmpA/tmpB");
fs.rmdirSync("tmpA");
} catch (e) {
// doesn't matter
}
});
const errorEPERM = new Error("EPERM");
errorEPERM.code = "EPERM";
let sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEPERM ;
}
}
}
});
t.throws(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
}),
errorEPERM
);
const errorEROFS = new Error("EROFS");
errorEROFS.code = "EROFS";
sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEROFS;
},
statSync() {
return { isDirectory() { return false; } };
}
}
}
});
t.throws(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
}),
errorEROFS
);
fs.mkdirSync("tmpA/tmpB/tmpC", { recursive: true });
sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEROFS;
}
}
}
});
t.doesNotThrow(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" }
}
},
categories: {
default: { appenders: ["sync"], level: "debug" }
}
})
);
t.end();
});
batch.test("test options", t => {
const testFile = "tmp-options-tests.log";
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// using non-standard options
log4js.configure({
appenders: {
sync: {
type: "fileSync",
filename: testFile,
layout: { type: "messagePassThrough" },
flags: "w",
encoding: "ascii",
mode: 0o666
}
},
categories: {
default: { appenders: ["sync"], level: "info" }
}
});
const logger = log4js.getLogger();
logger.warn("log message");
fs.readFile(testFile, "ascii", (err, contents) => {
t.match(contents, `log message${EOL}`);
t.end();
});
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./test/tap/multiprocess-test.js | const childProcess = require("child_process");
const { test } = require("tap");
const flatted = require("flatted");
const sandbox = require("@log4js-node/sandboxed-module");
const recording = require("../../lib/appenders/recording");
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end() {
fakeNet.closeCalled = true;
}
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: "1.2.3.4",
remotePort: "1234",
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
}
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
test("Multiprocess Appender", async batch => {
batch.beforeEach(() => {
recording.erase();
});
batch.test("worker", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
worker: {
type: "multiprocess",
mode: "worker",
loggerPort: 1234,
loggerHost: "pants"
}
},
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.close();
logger.info("after error, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
logger.error(new Error("Error test"));
const net = fakeNet;
t.test("should open a socket to the loggerPort and loggerHost", assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "pants");
assert.end();
});
t.test(
"should buffer messages written before socket is connected",
assert => {
assert.match(net.data[0], "before connect");
assert.end();
}
);
t.test(
"should write log messages to socket as flatted strings with a terminator string",
assert => {
assert.match(net.data[0], "before connect");
assert.equal(net.data[1], "__LOG4JS__");
assert.match(net.data[2], "after connect");
assert.equal(net.data[3], "__LOG4JS__");
assert.equal(net.encoding, "utf8");
assert.end();
}
);
t.test("should attempt to re-open the socket on error", assert => {
assert.match(net.data[4], "after error, before connect");
assert.equal(net.data[5], "__LOG4JS__");
assert.match(net.data[6], "after error, after connect");
assert.equal(net.data[7], "__LOG4JS__");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.test("should serialize an Error correctly", assert => {
assert.ok(
flatted.parse(net.data[8]).data[0].stack,
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(net.data[8]).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.test("worker with timeout", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.timeout();
logger.info("after timeout, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after timeout, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker with error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after error, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
t.test("should open a socket to localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
batch.test("master", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet,
"./appenders/recording": recording
}
});
log4js.configure({
appenders: {
recorder: { type: "recording" },
master: {
type: "multiprocess",
mode: "master",
loggerPort: 1234,
loggerHost: "server",
appender: "recorder"
}
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
const net = fakeNet;
t.test(
"should listen for log messages on loggerPort and loggerHost",
assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "server");
assert.end();
}
);
t.test("should return the underlying appender", assert => {
log4js
.getLogger()
.info("this should be sent to the actual appender directly");
assert.equal(
recording.replay()[0].data[0],
"this should be sent to the actual appender directly"
);
assert.end();
});
t.test('should log the error on "error" event', assert => {
net.cbs.error(new Error("Expected error"));
const logEvents = recording.replay();
assert.plan(2);
assert.equal(logEvents.length, 1);
assert.equal(
"A worker log process hung up unexpectedly",
logEvents[0].data[0]
);
});
t.test("when a client connects", assert => {
const logString = `${flatted.stringify({
level: { level: 10000, levelStr: "DEBUG" },
data: ["some debug"]
})}__LOG4JS__`;
net.cbs.data(
`${flatted.stringify({
level: { level: 40000, levelStr: "ERROR" },
data: ["an error message"]
})}__LOG4JS__`
);
net.cbs.data(logString.slice(0, 10));
net.cbs.data(logString.slice(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${flatted.stringify({
level: { level: 50000, levelStr: "FATAL" },
data: ["that's all folks"]
})}__LOG4JS__`
);
net.cbs.data("bad message__LOG4JS__");
const logEvents = recording.replay();
// should parse log messages into log events and send to appender
assert.equal(logEvents[0].level.toString(), "ERROR");
assert.equal(logEvents[0].data[0], "an error message");
assert.equal(logEvents[0].remoteAddress, "1.2.3.4");
assert.equal(logEvents[0].remotePort, "1234");
// should parse log messages split into multiple chunks'
assert.equal(logEvents[1].level.toString(), "DEBUG");
assert.equal(logEvents[1].data[0], "some debug");
assert.equal(logEvents[1].remoteAddress, "1.2.3.4");
assert.equal(logEvents[1].remotePort, "1234");
// should parse multiple log messages in a single chunk'
assert.equal(logEvents[2].data[0], "some debug");
assert.equal(logEvents[3].data[0], "some debug");
assert.equal(logEvents[4].data[0], "some debug");
// should handle log messages sent as part of end event'
assert.equal(logEvents[5].data[0], "that's all folks");
// should handle unparseable log messages
assert.equal(logEvents[6].level.toString(), "ERROR");
assert.equal(logEvents[6].categoryName, "log4js");
assert.equal(logEvents[6].data[0], "Unable to parse log:");
assert.equal(logEvents[6].data[1], "bad message");
assert.end();
});
t.end();
});
batch.test("master without actual appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: { master: { type: "multiprocess", mode: "master" } },
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
});
batch.test("master with unknown appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: {
master: { type: "multiprocess", mode: "master", appender: "cheese" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
});
batch.test("master defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
stdout: { type: "stdout" },
master: { type: "multiprocess", mode: "master", appender: "stdout" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
t.test("should listen for log messages on localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
await batch.test('e2e test', async (assert) => {
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recording: { type: 'recording' },
master: { type: 'multiprocess', mode: 'master', appender: 'recording', loggerPort: 5001 },
},
categories: { default: { appenders: ['recording'], level: 'trace' } },
});
const child = childProcess.fork(
require.resolve('../multiprocess-worker.js'),
['start-multiprocess-worker', '5001'],
{ stdio: 'inherit' }
);
const actualMsg = await new Promise((res, rej) => {
child.on('message', res);
child.on('error', rej);
});
const logEvents = recording.replay();
assert.equal(actualMsg, 'worker is done');
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'Logging from worker');
assert.end();
});
batch.end();
});
| const childProcess = require("child_process");
const { test } = require("tap");
const flatted = require("flatted");
const sandbox = require("@log4js-node/sandboxed-module");
const recording = require("../../lib/appenders/recording");
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end() {
fakeNet.closeCalled = true;
}
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: "1.2.3.4",
remotePort: "1234",
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
}
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
test("Multiprocess Appender", async batch => {
batch.beforeEach(() => {
recording.erase();
});
batch.test("worker", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
worker: {
type: "multiprocess",
mode: "worker",
loggerPort: 1234,
loggerHost: "pants"
}
},
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.close();
logger.info("after error, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
logger.error(new Error("Error test"));
const net = fakeNet;
t.test("should open a socket to the loggerPort and loggerHost", assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "pants");
assert.end();
});
t.test(
"should buffer messages written before socket is connected",
assert => {
assert.match(net.data[0], "before connect");
assert.end();
}
);
t.test(
"should write log messages to socket as flatted strings with a terminator string",
assert => {
assert.match(net.data[0], "before connect");
assert.equal(net.data[1], "__LOG4JS__");
assert.match(net.data[2], "after connect");
assert.equal(net.data[3], "__LOG4JS__");
assert.equal(net.encoding, "utf8");
assert.end();
}
);
t.test("should attempt to re-open the socket on error", assert => {
assert.match(net.data[4], "after error, before connect");
assert.equal(net.data[5], "__LOG4JS__");
assert.match(net.data[6], "after error, after connect");
assert.equal(net.data[7], "__LOG4JS__");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.test("should serialize an Error correctly", assert => {
assert.ok(
flatted.parse(net.data[8]).data[0].stack,
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(net.data[8]).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.test("worker with timeout", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.timeout();
logger.info("after timeout, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after timeout, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker with error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after error, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
t.test("should open a socket to localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
batch.test("master", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet,
"./appenders/recording": recording
}
});
log4js.configure({
appenders: {
recorder: { type: "recording" },
master: {
type: "multiprocess",
mode: "master",
loggerPort: 1234,
loggerHost: "server",
appender: "recorder"
}
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
const net = fakeNet;
t.test(
"should listen for log messages on loggerPort and loggerHost",
assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "server");
assert.end();
}
);
t.test("should return the underlying appender", assert => {
log4js
.getLogger()
.info("this should be sent to the actual appender directly");
assert.equal(
recording.replay()[0].data[0],
"this should be sent to the actual appender directly"
);
assert.end();
});
t.test('should log the error on "error" event', assert => {
net.cbs.error(new Error("Expected error"));
const logEvents = recording.replay();
assert.plan(2);
assert.equal(logEvents.length, 1);
assert.equal(
"A worker log process hung up unexpectedly",
logEvents[0].data[0]
);
});
t.test("when a client connects", assert => {
const logString = `${flatted.stringify({
level: { level: 10000, levelStr: "DEBUG" },
data: ["some debug"]
})}__LOG4JS__`;
net.cbs.data(
`${flatted.stringify({
level: { level: 40000, levelStr: "ERROR" },
data: ["an error message"]
})}__LOG4JS__`
);
net.cbs.data(logString.slice(0, 10));
net.cbs.data(logString.slice(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${flatted.stringify({
level: { level: 50000, levelStr: "FATAL" },
data: ["that's all folks"]
})}__LOG4JS__`
);
net.cbs.data("bad message__LOG4JS__");
const logEvents = recording.replay();
// should parse log messages into log events and send to appender
assert.equal(logEvents[0].level.toString(), "ERROR");
assert.equal(logEvents[0].data[0], "an error message");
assert.equal(logEvents[0].remoteAddress, "1.2.3.4");
assert.equal(logEvents[0].remotePort, "1234");
// should parse log messages split into multiple chunks'
assert.equal(logEvents[1].level.toString(), "DEBUG");
assert.equal(logEvents[1].data[0], "some debug");
assert.equal(logEvents[1].remoteAddress, "1.2.3.4");
assert.equal(logEvents[1].remotePort, "1234");
// should parse multiple log messages in a single chunk'
assert.equal(logEvents[2].data[0], "some debug");
assert.equal(logEvents[3].data[0], "some debug");
assert.equal(logEvents[4].data[0], "some debug");
// should handle log messages sent as part of end event'
assert.equal(logEvents[5].data[0], "that's all folks");
// should handle unparseable log messages
assert.equal(logEvents[6].level.toString(), "ERROR");
assert.equal(logEvents[6].categoryName, "log4js");
assert.equal(logEvents[6].data[0], "Unable to parse log:");
assert.equal(logEvents[6].data[1], "bad message");
assert.end();
});
t.end();
});
batch.test("master without actual appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: { master: { type: "multiprocess", mode: "master" } },
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
});
batch.test("master with unknown appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: {
master: { type: "multiprocess", mode: "master", appender: "cheese" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
});
batch.test("master defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
stdout: { type: "stdout" },
master: { type: "multiprocess", mode: "master", appender: "stdout" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
t.test("should listen for log messages on localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
await batch.test('e2e test', async (assert) => {
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recording: { type: 'recording' },
master: { type: 'multiprocess', mode: 'master', appender: 'recording', loggerPort: 5001 },
},
categories: { default: { appenders: ['recording'], level: 'trace' } },
});
const child = childProcess.fork(
require.resolve('../multiprocess-worker.js'),
['start-multiprocess-worker', '5001'],
{ stdio: 'inherit' }
);
const actualMsg = await new Promise((res, rej) => {
child.on('message', res);
child.on('error', rej);
});
const logEvents = recording.replay();
assert.equal(actualMsg, 'worker is done');
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'Logging from worker');
assert.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./lib/appenders/fileSync.js | const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');
const os = require('os');
const eol = os.EOL;
function touchFile(file, options) {
// if the file exists, nothing to do
if (fs.existsSync(file)) {
return;
}
// attempt to create the directory
const mkdir = (dir) => {
try {
return fs.mkdirSync(dir, {recursive: true});
}
// backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option)
catch (e) {
// recursive creation of parent first
if (e.code === 'ENOENT') {
mkdir(path.dirname(dir));
return mkdir(dir);
}
// throw error for all except EEXIST and EROFS (read-only filesystem)
if (e.code !== 'EEXIST' && e.code !== 'EROFS') {
throw e;
}
// EEXIST: throw if file and not directory
// EROFS : throw if directory not found
else {
try {
if (fs.statSync(dir).isDirectory()) {
return dir;
}
throw e;
} catch (err) {
throw e;
}
}
}
};
mkdir(path.dirname(file));
// touch the file to apply flags (like w to truncate the file)
const id = fs.openSync(file, options.flags, options.mode);
fs.closeSync(id);
}
class RollingFileSync {
constructor(filename, maxLogSize, backups, options) {
debug('In RollingFileStream');
if (maxLogSize < 0) {
throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
}
this.filename = filename;
this.size = maxLogSize;
this.backups = backups;
this.options = options;
this.currentSize = 0;
function currentFileSize(file) {
let fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
touchFile(file, options);
}
return fileSize;
}
this.currentSize = currentFileSize(this.filename);
}
shouldRoll() {
debug('should roll with current size %d, and max size %d', this.currentSize, this.size);
return this.currentSize >= this.size;
}
roll(filename) {
const that = this;
const nameMatcher = new RegExp(`^${path.basename(filename)}`);
function justTheseFiles(item) {
return nameMatcher.test(item);
}
function index(filename_) {
return parseInt(filename_.slice((`${path.basename(filename)}.`).length), 10) || 0;
}
function byIndex(a, b) {
return index(a) - index(b);
}
function increaseFileIndex(fileToRename) {
const idx = index(fileToRename);
debug(`Index of ${fileToRename} is ${idx}`);
if (that.backups === 0) {
fs.truncateSync(filename, 0);
} else if (idx < that.backups) {
// on windows, you can get a EEXIST error if you rename a file to an existing file
// so, we'll try to delete the file we're renaming to first
try {
fs.unlinkSync(`${filename}.${idx + 1}`);
} catch (e) {
// ignore err: if we could not delete, it's most likely that it doesn't exist
}
debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`);
fs.renameSync(path.join(path.dirname(filename), fileToRename), `${filename}.${idx + 1}`);
}
}
function renameTheFiles() {
// roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug('Renaming the old files');
const files = fs.readdirSync(path.dirname(filename));
files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
}
debug('Rolling, rolling, rolling');
renameTheFiles();
}
// eslint-disable-next-line no-unused-vars
write(chunk, encoding) {
const that = this;
function writeTheChunk() {
debug('writing the chunk to the file');
that.currentSize += chunk.length;
fs.appendFileSync(that.filename, chunk);
}
debug('in write');
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename);
}
writeTheChunk();
}
}
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file the file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param options - options to be passed to the underlying stream
* @param timezoneOffset - optional timezone offset in minutes (default system local)
*/
function fileAppender(file, layout, logSize, numBackups, options, timezoneOffset) {
if (typeof file !== "string" || file.length === 0) {
throw new Error(`Invalid filename: ${file}`);
}
file = path.normalize(file);
numBackups = (!numBackups && numBackups !== 0) ? 5 : numBackups;
debug(
'Creating fileSync appender (',
file, ', ',
logSize, ', ',
numBackups, ', ',
options, ', ',
timezoneOffset, ')'
);
function openTheStream(filePath, fileSize, numFiles) {
let stream;
if (fileSize) {
stream = new RollingFileSync(
filePath,
fileSize,
numFiles,
options
);
} else {
stream = (((f) => {
// touch the file to apply flags (like w to truncate the file)
touchFile(f, options);
return {
write(data) {
fs.appendFileSync(f, data);
}
};
}))(filePath);
}
return stream;
}
const logFile = openTheStream(file, logSize, numBackups);
return (loggingEvent) => {
logFile.write(layout(loggingEvent, timezoneOffset) + eol);
};
}
function configure(config, layouts) {
let layout = layouts.basicLayout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
const options = {
flags: config.flags || 'a',
encoding: config.encoding || 'utf8',
mode: config.mode || 0o600
};
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
options,
config.timezoneOffset
);
}
module.exports.configure = configure;
| const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');
const os = require('os');
const eol = os.EOL;
function touchFile(file, options) {
// if the file exists, nothing to do
if (fs.existsSync(file)) {
return;
}
// attempt to create the directory
const mkdir = (dir) => {
try {
return fs.mkdirSync(dir, {recursive: true});
}
// backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option)
catch (e) {
// recursive creation of parent first
if (e.code === 'ENOENT') {
mkdir(path.dirname(dir));
return mkdir(dir);
}
// throw error for all except EEXIST and EROFS (read-only filesystem)
if (e.code !== 'EEXIST' && e.code !== 'EROFS') {
throw e;
}
// EEXIST: throw if file and not directory
// EROFS : throw if directory not found
else {
try {
if (fs.statSync(dir).isDirectory()) {
return dir;
}
throw e;
} catch (err) {
throw e;
}
}
}
};
mkdir(path.dirname(file));
// touch the file to apply flags (like w to truncate the file)
const id = fs.openSync(file, options.flags, options.mode);
fs.closeSync(id);
}
class RollingFileSync {
constructor(filename, maxLogSize, backups, options) {
debug('In RollingFileStream');
if (maxLogSize < 0) {
throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
}
this.filename = filename;
this.size = maxLogSize;
this.backups = backups;
this.options = options;
this.currentSize = 0;
function currentFileSize(file) {
let fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
touchFile(file, options);
}
return fileSize;
}
this.currentSize = currentFileSize(this.filename);
}
shouldRoll() {
debug('should roll with current size %d, and max size %d', this.currentSize, this.size);
return this.currentSize >= this.size;
}
roll(filename) {
const that = this;
const nameMatcher = new RegExp(`^${path.basename(filename)}`);
function justTheseFiles(item) {
return nameMatcher.test(item);
}
function index(filename_) {
return parseInt(filename_.slice((`${path.basename(filename)}.`).length), 10) || 0;
}
function byIndex(a, b) {
return index(a) - index(b);
}
function increaseFileIndex(fileToRename) {
const idx = index(fileToRename);
debug(`Index of ${fileToRename} is ${idx}`);
if (that.backups === 0) {
fs.truncateSync(filename, 0);
} else if (idx < that.backups) {
// on windows, you can get a EEXIST error if you rename a file to an existing file
// so, we'll try to delete the file we're renaming to first
try {
fs.unlinkSync(`${filename}.${idx + 1}`);
} catch (e) {
// ignore err: if we could not delete, it's most likely that it doesn't exist
}
debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`);
fs.renameSync(path.join(path.dirname(filename), fileToRename), `${filename}.${idx + 1}`);
}
}
function renameTheFiles() {
// roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug('Renaming the old files');
const files = fs.readdirSync(path.dirname(filename));
files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
}
debug('Rolling, rolling, rolling');
renameTheFiles();
}
// eslint-disable-next-line no-unused-vars
write(chunk, encoding) {
const that = this;
function writeTheChunk() {
debug('writing the chunk to the file');
that.currentSize += chunk.length;
fs.appendFileSync(that.filename, chunk);
}
debug('in write');
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename);
}
writeTheChunk();
}
}
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file the file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param options - options to be passed to the underlying stream
* @param timezoneOffset - optional timezone offset in minutes (default system local)
*/
function fileAppender(file, layout, logSize, numBackups, options, timezoneOffset) {
if (typeof file !== "string" || file.length === 0) {
throw new Error(`Invalid filename: ${file}`);
}
file = path.normalize(file);
numBackups = (!numBackups && numBackups !== 0) ? 5 : numBackups;
debug(
'Creating fileSync appender (',
file, ', ',
logSize, ', ',
numBackups, ', ',
options, ', ',
timezoneOffset, ')'
);
function openTheStream(filePath, fileSize, numFiles) {
let stream;
if (fileSize) {
stream = new RollingFileSync(
filePath,
fileSize,
numFiles,
options
);
} else {
stream = (((f) => {
// touch the file to apply flags (like w to truncate the file)
touchFile(f, options);
return {
write(data) {
fs.appendFileSync(f, data);
}
};
}))(filePath);
}
return stream;
}
const logFile = openTheStream(file, logSize, numBackups);
return (loggingEvent) => {
logFile.write(layout(loggingEvent, timezoneOffset) + eol);
};
}
function configure(config, layouts) {
let layout = layouts.basicLayout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
const options = {
flags: config.flags || 'a',
encoding: config.encoding || 'utf8',
mode: config.mode || 0o600
};
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
options,
config.timezoneOffset
);
}
module.exports.configure = configure;
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./lib/appenders/stdout.js |
function stdoutAppender(layout, timezoneOffset) {
return (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
}
function configure(config, layouts) {
let layout = layouts.colouredLayout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stdoutAppender(layout, config.timezoneOffset);
}
exports.configure = configure;
|
function stdoutAppender(layout, timezoneOffset) {
return (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
}
function configure(config, layouts) {
let layout = layouts.colouredLayout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stdoutAppender(layout, config.timezoneOffset);
}
exports.configure = configure;
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./test/tap/dateFileAppender-test.js | /* eslint max-classes-per-file: ["error", 3] */
const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const EOL = require("os").EOL || "\n";
const format = require("date-format");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
function removeFile(filename) {
try {
fs.unlinkSync(path.join(__dirname, filename));
} catch (e) {
// doesn't matter
}
}
test("../../lib/appenders/dateFile", batch => {
batch.test("with default settings", t => {
const testFile = path.join(__dirname, "date-appender-default.log");
log4js.configure({
appenders: { date: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["date"], level: "DEBUG" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("This should be in the file.");
t.teardown(() => {
removeFile("date-appender-default.log");
});
setTimeout(() => {
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, "This should be in the file");
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
}, 100);
});
batch.test("configure with dateFileAppender", t => {
log4js.configure({
appenders: {
date: {
type: "dateFile",
filename: "test/tap/date-file-test.log",
pattern: "-yyyy-MM-dd",
layout: { type: "messagePassThrough" }
}
},
categories: { default: { appenders: ["date"], level: "WARN" } }
});
const logger = log4js.getLogger("tests");
logger.info("this should not be written to the file");
logger.warn("this should be written to the file");
log4js.shutdown(() => {
fs.readFile(
path.join(__dirname, "date-file-test.log"),
"utf8",
(err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(
contents.indexOf("this should not be written to the file"),
-1
);
t.end();
}
);
});
t.teardown(() => {
removeFile("date-file-test.log");
});
});
batch.test("configure with options.alwaysIncludePattern", t => {
const options = {
appenders: {
date: {
category: "tests",
type: "dateFile",
filename: "test/tap/date-file-test",
pattern: "yyyy-MM-dd.log",
alwaysIncludePattern: true,
layout: {
type: "messagePassThrough"
}
}
},
categories: { default: { appenders: ["date"], level: "debug" } }
};
const thisTime = format.asString(
options.appenders.date.pattern,
new Date()
);
const testFile = `date-file-test.${thisTime}`;
const existingFile = path.join(
__dirname,
testFile
);
fs.writeFileSync(existingFile, `this is existing data${EOL}`, "utf8");
log4js.configure(options);
const logger = log4js.getLogger("tests");
logger.warn("this should be written to the file with the appended date");
t.teardown(() => {
removeFile(testFile);
});
// wait for filesystem to catch up
log4js.shutdown(() => {
fs.readFile(existingFile, "utf8", (err, contents) => {
t.match(
contents,
"this is existing data",
"should not overwrite the file on open (issue #132)"
);
t.match(
contents,
"this should be written to the file with the appended date"
);
t.end();
});
});
});
batch.test("should flush logs on shutdown", t => {
const testFile = path.join(__dirname, "date-appender-flush.log");
log4js.configure({
appenders: { test: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("1");
logger.info("2");
logger.info("3");
t.teardown(() => {
removeFile("date-appender-flush.log");
});
log4js.shutdown(() => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test("should map maxLogSize to maxSize", t => {
const fakeStreamroller = {};
class DateRollingFileStream {
constructor(filename, pattern, options) {
fakeStreamroller.filename = filename;
fakeStreamroller.pattern = pattern;
fakeStreamroller.options = options;
}
on() { } // eslint-disable-line class-methods-use-this
}
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
const dateFileAppenderModule = sandbox.require(
"../../lib/appenders/dateFile",
{
requires: { streamroller: fakeStreamroller }
}
);
dateFileAppenderModule.configure(
{
filename: "cheese.log",
pattern: "yyyy",
maxLogSize: 100
},
{ basicLayout: () => {} }
);
t.equal(fakeStreamroller.options.maxSize, 100);
t.end();
});
batch.test("handling of writer.writable", t => {
const output = [];
let writable = true;
const DateRollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
on() { // eslint-disable-line class-methods-use-this
}
get writable() { // eslint-disable-line class-methods-use-this
return writable;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
requires: {
streamroller: {
DateRollingFileStream
}
}
});
const appender = dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout(loggingEvent) { return loggingEvent.data; } }
);
t.test("should log when writer.writable=true", assert => {
writable = true;
appender({data: "something to log"});
assert.ok(output.length, 1);
assert.match(output[output.length - 1], "something to log");
assert.end();
});
t.test("should not log when writer.writable=false", assert => {
writable = false;
appender({data: "this should not be logged"});
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], "this should not be logged");
assert.end();
});
t.end();
});
batch.test("when underlying stream errors", t => {
let consoleArgs;
let errorHandler;
const DateRollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === "error") {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
globals: {
console: {
error(...args) {
consoleArgs = args;
}
}
},
requires: {
streamroller: {
DateRollingFileStream
}
}
});
dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: "aargh" });
t.test("should log the error to console.error", assert => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
"log4js.dateFileAppender - Writing to file %s, error happened "
);
assert.equal(consoleArgs[1], "test1.log");
assert.equal(consoleArgs[2].error, "aargh");
assert.end();
});
t.end();
});
batch.end();
});
| /* eslint max-classes-per-file: ["error", 3] */
const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const EOL = require("os").EOL || "\n";
const format = require("date-format");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
function removeFile(filename) {
try {
fs.unlinkSync(path.join(__dirname, filename));
} catch (e) {
// doesn't matter
}
}
test("../../lib/appenders/dateFile", batch => {
batch.test("with default settings", t => {
const testFile = path.join(__dirname, "date-appender-default.log");
log4js.configure({
appenders: { date: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["date"], level: "DEBUG" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("This should be in the file.");
t.teardown(() => {
removeFile("date-appender-default.log");
});
setTimeout(() => {
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, "This should be in the file");
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
}, 100);
});
batch.test("configure with dateFileAppender", t => {
log4js.configure({
appenders: {
date: {
type: "dateFile",
filename: "test/tap/date-file-test.log",
pattern: "-yyyy-MM-dd",
layout: { type: "messagePassThrough" }
}
},
categories: { default: { appenders: ["date"], level: "WARN" } }
});
const logger = log4js.getLogger("tests");
logger.info("this should not be written to the file");
logger.warn("this should be written to the file");
log4js.shutdown(() => {
fs.readFile(
path.join(__dirname, "date-file-test.log"),
"utf8",
(err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(
contents.indexOf("this should not be written to the file"),
-1
);
t.end();
}
);
});
t.teardown(() => {
removeFile("date-file-test.log");
});
});
batch.test("configure with options.alwaysIncludePattern", t => {
const options = {
appenders: {
date: {
category: "tests",
type: "dateFile",
filename: "test/tap/date-file-test",
pattern: "yyyy-MM-dd.log",
alwaysIncludePattern: true,
layout: {
type: "messagePassThrough"
}
}
},
categories: { default: { appenders: ["date"], level: "debug" } }
};
const thisTime = format.asString(
options.appenders.date.pattern,
new Date()
);
const testFile = `date-file-test.${thisTime}`;
const existingFile = path.join(
__dirname,
testFile
);
fs.writeFileSync(existingFile, `this is existing data${EOL}`, "utf8");
log4js.configure(options);
const logger = log4js.getLogger("tests");
logger.warn("this should be written to the file with the appended date");
t.teardown(() => {
removeFile(testFile);
});
// wait for filesystem to catch up
log4js.shutdown(() => {
fs.readFile(existingFile, "utf8", (err, contents) => {
t.match(
contents,
"this is existing data",
"should not overwrite the file on open (issue #132)"
);
t.match(
contents,
"this should be written to the file with the appended date"
);
t.end();
});
});
});
batch.test("should flush logs on shutdown", t => {
const testFile = path.join(__dirname, "date-appender-flush.log");
log4js.configure({
appenders: { test: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("1");
logger.info("2");
logger.info("3");
t.teardown(() => {
removeFile("date-appender-flush.log");
});
log4js.shutdown(() => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test("should map maxLogSize to maxSize", t => {
const fakeStreamroller = {};
class DateRollingFileStream {
constructor(filename, pattern, options) {
fakeStreamroller.filename = filename;
fakeStreamroller.pattern = pattern;
fakeStreamroller.options = options;
}
on() { } // eslint-disable-line class-methods-use-this
}
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
const dateFileAppenderModule = sandbox.require(
"../../lib/appenders/dateFile",
{
requires: { streamroller: fakeStreamroller }
}
);
dateFileAppenderModule.configure(
{
filename: "cheese.log",
pattern: "yyyy",
maxLogSize: 100
},
{ basicLayout: () => {} }
);
t.equal(fakeStreamroller.options.maxSize, 100);
t.end();
});
batch.test("handling of writer.writable", t => {
const output = [];
let writable = true;
const DateRollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
on() { // eslint-disable-line class-methods-use-this
}
get writable() { // eslint-disable-line class-methods-use-this
return writable;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
requires: {
streamroller: {
DateRollingFileStream
}
}
});
const appender = dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout(loggingEvent) { return loggingEvent.data; } }
);
t.test("should log when writer.writable=true", assert => {
writable = true;
appender({data: "something to log"});
assert.ok(output.length, 1);
assert.match(output[output.length - 1], "something to log");
assert.end();
});
t.test("should not log when writer.writable=false", assert => {
writable = false;
appender({data: "this should not be logged"});
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], "this should not be logged");
assert.end();
});
t.end();
});
batch.test("when underlying stream errors", t => {
let consoleArgs;
let errorHandler;
const DateRollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === "error") {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
globals: {
console: {
error(...args) {
consoleArgs = args;
}
}
},
requires: {
streamroller: {
DateRollingFileStream
}
}
});
dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: "aargh" });
t.test("should log the error to console.error", assert => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
"log4js.dateFileAppender - Writing to file %s, error happened "
);
assert.equal(consoleArgs[1], "test1.log");
assert.equal(consoleArgs[2].error, "aargh");
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./lib/appenders/multiFile.js |
const debug = require('debug')('log4js:multiFile');
const path = require('path');
const fileAppender = require('./file');
const findFileKey = (property, event) => event[property] || event.context[property];
module.exports.configure = (config, layouts) => {
debug('Creating a multi-file appender');
const files = new Map();
const timers = new Map();
function checkForTimeout(fileKey) {
const timer = timers.get(fileKey);
const app = files.get(fileKey);
/* istanbul ignore else: failsafe */
if (timer && app) {
if (Date.now() - timer.lastUsed > timer.timeout) {
debug('%s not used for > %d ms => close', fileKey, timer.timeout);
clearInterval(timer.interval);
timers.delete(fileKey);
files.delete(fileKey);
app.shutdown((err) => {
if (err) {
debug('ignore error on file shutdown: %s', err.message);
}
});
}
} else {
// will never get here as files and timers are coupled to be added and deleted at same place
debug('timer or app does not exist');
}
}
const appender = (logEvent) => {
const fileKey = findFileKey(config.property, logEvent);
debug('fileKey for property ', config.property, ' is ', fileKey);
if (fileKey) {
let file = files.get(fileKey);
debug('existing file appender is ', file);
if (!file) {
debug('creating new file appender');
config.filename = path.join(config.base, fileKey + config.extension);
file = fileAppender.configure(config, layouts);
files.set(fileKey, file);
if (config.timeout) {
debug('creating new timer');
timers.set(fileKey, {
timeout: config.timeout,
lastUsed: Date.now(),
interval: setInterval(checkForTimeout.bind(null, fileKey), config.timeout)
});
}
} else if (config.timeout) {
debug('%s extending activity', fileKey);
timers.get(fileKey).lastUsed = Date.now();
}
file(logEvent);
} else {
debug('No fileKey for logEvent, quietly ignoring this log event');
}
};
appender.shutdown = (cb) => {
let shutdownFunctions = files.size;
if (shutdownFunctions <= 0) {
cb();
}
let error;
timers.forEach((timer, fileKey) => {
debug('clearing timer for ', fileKey);
clearInterval(timer.interval);
});
files.forEach((app, fileKey) => {
debug('calling shutdown for ', fileKey);
app.shutdown((err) => {
error = error || err;
shutdownFunctions -= 1;
if (shutdownFunctions <= 0) {
cb(error);
}
});
});
};
return appender;
};
|
const debug = require('debug')('log4js:multiFile');
const path = require('path');
const fileAppender = require('./file');
const findFileKey = (property, event) => event[property] || event.context[property];
module.exports.configure = (config, layouts) => {
debug('Creating a multi-file appender');
const files = new Map();
const timers = new Map();
function checkForTimeout(fileKey) {
const timer = timers.get(fileKey);
const app = files.get(fileKey);
/* istanbul ignore else: failsafe */
if (timer && app) {
if (Date.now() - timer.lastUsed > timer.timeout) {
debug('%s not used for > %d ms => close', fileKey, timer.timeout);
clearInterval(timer.interval);
timers.delete(fileKey);
files.delete(fileKey);
app.shutdown((err) => {
if (err) {
debug('ignore error on file shutdown: %s', err.message);
}
});
}
} else {
// will never get here as files and timers are coupled to be added and deleted at same place
debug('timer or app does not exist');
}
}
const appender = (logEvent) => {
const fileKey = findFileKey(config.property, logEvent);
debug('fileKey for property ', config.property, ' is ', fileKey);
if (fileKey) {
let file = files.get(fileKey);
debug('existing file appender is ', file);
if (!file) {
debug('creating new file appender');
config.filename = path.join(config.base, fileKey + config.extension);
file = fileAppender.configure(config, layouts);
files.set(fileKey, file);
if (config.timeout) {
debug('creating new timer');
timers.set(fileKey, {
timeout: config.timeout,
lastUsed: Date.now(),
interval: setInterval(checkForTimeout.bind(null, fileKey), config.timeout)
});
}
} else if (config.timeout) {
debug('%s extending activity', fileKey);
timers.get(fileKey).lastUsed = Date.now();
}
file(logEvent);
} else {
debug('No fileKey for logEvent, quietly ignoring this log event');
}
};
appender.shutdown = (cb) => {
let shutdownFunctions = files.size;
if (shutdownFunctions <= 0) {
cb();
}
let error;
timers.forEach((timer, fileKey) => {
debug('clearing timer for ', fileKey);
clearInterval(timer.interval);
});
files.forEach((app, fileKey) => {
debug('calling shutdown for ', fileKey);
app.shutdown((err) => {
error = error || err;
shutdownFunctions -= 1;
if (shutdownFunctions <= 0) {
cb(error);
}
});
});
};
return appender;
};
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./.git/hooks/applypatch-msg.sample | #!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
. git-sh-setup
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
:
| #!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
. git-sh-setup
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
:
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./examples/rabbitmq-appender.js | // Note that rabbitmq appender needs install amqplib to work.
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console'
},
file: {
type: 'dateFile',
filename: 'logs/log.txt',
pattern: 'yyyyMMdd',
alwaysIncludePattern: false
},
mq: {
type: '@log4js-node/rabbitmq',
host: '127.0.0.1',
port: 5672,
username: 'guest',
password: 'guest',
routing_key: 'logstash',
exchange: 'exchange_logs',
mq_type: 'direct',
durable: true,
layout: {
type: 'pattern',
pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m'
}
}
},
categories: {
default: { appenders: ['out'], level: 'info' },
dateFile: { appenders: ['file'], level: 'info' },
rabbitmq: { appenders: ['mq'], level: 'info' }
}
});
const log = log4js.getLogger('console');
const logRabbitmq = log4js.getLogger('rabbitmq');
function doTheLogging(x) {
log.info('Logging something %d', x);
logRabbitmq.info('Logging something %d', x);
}
for (let i = 0; i < 500; i += 1) {
doTheLogging(i);
}
| // Note that rabbitmq appender needs install amqplib to work.
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console'
},
file: {
type: 'dateFile',
filename: 'logs/log.txt',
pattern: 'yyyyMMdd',
alwaysIncludePattern: false
},
mq: {
type: '@log4js-node/rabbitmq',
host: '127.0.0.1',
port: 5672,
username: 'guest',
password: 'guest',
routing_key: 'logstash',
exchange: 'exchange_logs',
mq_type: 'direct',
durable: true,
layout: {
type: 'pattern',
pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m'
}
}
},
categories: {
default: { appenders: ['out'], level: 'info' },
dateFile: { appenders: ['file'], level: 'info' },
rabbitmq: { appenders: ['mq'], level: 'info' }
}
});
const log = log4js.getLogger('console');
const logRabbitmq = log4js.getLogger('rabbitmq');
function doTheLogging(x) {
log.info('Logging something %d', x);
logRabbitmq.info('Logging something %d', x);
}
for (let i = 0; i < 500; i += 1) {
doTheLogging(i);
}
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./.git/description | Unnamed repository; edit this file 'description' to name the repository.
| Unnamed repository; edit this file 'description' to name the repository.
| -1 |
log4js-node/log4js-node | 1,240 | chore(docs): updated comments in typescript def | Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | lamweili | 2022-05-11T05:47:01Z | 2022-05-11T17:06:30Z | 3b56fae2dd21923c6b7068e8cecf798c65c555d9 | 362a397f7d1723a200cad39ea6ef7a4c755e5500 | chore(docs): updated comments in typescript def. Fixes #1239
- based on https://github.com/log4js-node/log4js-node/commit/faef3d20f643a9e1a29883f493cdbe893a47de77
- based on https://github.com/log4js-node/log4js-node/commit/d894f573058d50f6c41e9048f46fadc037097b16
- based on https://github.com/log4js-node/log4js-node/commit/e2947474c719d891b282858a4509fa8292f7ac22
- based on https://github.com/log4js-node/log4js-node/pull/1182
- based on https://github.com/log4js-node/log4js-node/pull/1181 | ./docs/_config.yml | theme: jekyll-theme-minimal
repository: nomiddlename/log4js-node
| theme: jekyll-theme-minimal
repository: nomiddlename/log4js-node
| -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./lib/appenders/adapters.js | function maxFileSizeUnitTransform(maxLogSize) {
if (typeof maxLogSize === 'number' && Number.isInteger(maxLogSize)) {
return maxLogSize;
}
const units = {
K: 1024,
M: 1024 * 1024,
G: 1024 * 1024 * 1024,
};
const validUnit = Object.keys(units);
const unit = maxLogSize.substr(maxLogSize.length - 1).toLocaleUpperCase();
const value = maxLogSize.substring(0, maxLogSize.length - 1).trim();
if (validUnit.indexOf(unit) < 0 || !Number.isInteger(Number(value))) {
throw Error(`maxLogSize: "${maxLogSize}" is invalid`);
} else {
return value * units[unit];
}
}
function adapter(configAdapter, config) {
const newConfig = Object.assign({}, config); // eslint-disable-line prefer-object-spread
Object.keys(configAdapter).forEach((key) => {
if (newConfig[key]) {
newConfig[key] = configAdapter[key](config[key]);
}
});
return newConfig;
}
function fileAppenderAdapter(config) {
const configAdapter = {
maxLogSize: maxFileSizeUnitTransform
};
return adapter(configAdapter, config);
}
const adapters = {
file: fileAppenderAdapter,
fileSync: fileAppenderAdapter
};
module.exports.modifyConfig = config => (adapters[config.type] ? adapters[config.type](config) : config);
| function maxFileSizeUnitTransform(maxLogSize) {
if (typeof maxLogSize === 'number' && Number.isInteger(maxLogSize)) {
return maxLogSize;
}
const units = {
K: 1024,
M: 1024 * 1024,
G: 1024 * 1024 * 1024,
};
const validUnit = Object.keys(units);
const unit = maxLogSize.slice(-1).toLocaleUpperCase();
const value = maxLogSize.slice(0, -1).trim();
if (validUnit.indexOf(unit) < 0 || !Number.isInteger(Number(value))) {
throw Error(`maxLogSize: "${maxLogSize}" is invalid`);
} else {
return value * units[unit];
}
}
function adapter(configAdapter, config) {
const newConfig = Object.assign({}, config); // eslint-disable-line prefer-object-spread
Object.keys(configAdapter).forEach((key) => {
if (newConfig[key]) {
newConfig[key] = configAdapter[key](config[key]);
}
});
return newConfig;
}
function fileAppenderAdapter(config) {
const configAdapter = {
maxLogSize: maxFileSizeUnitTransform
};
return adapter(configAdapter, config);
}
const adapters = {
file: fileAppenderAdapter,
fileSync: fileAppenderAdapter
};
module.exports.modifyConfig = config => (adapters[config.type] ? adapters[config.type](config) : config);
| 1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./lib/appenders/fileSync.js | const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');
const os = require('os');
const eol = os.EOL;
function touchFile(file, options) {
// if the file exists, nothing to do
if (fs.existsSync(file)) {
return;
}
// attempt to create the directory
const mkdir = (dir) => {
try {
return fs.mkdirSync(dir, {recursive: true});
}
// backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option)
catch (e) {
// recursive creation of parent first
if (e.code === 'ENOENT') {
mkdir(path.dirname(dir));
return mkdir(dir);
}
// throw error for all except EEXIST and EROFS (read-only filesystem)
if (e.code !== 'EEXIST' && e.code !== 'EROFS') {
throw e;
}
// EEXIST: throw if file and not directory
// EROFS : throw if directory not found
else {
try {
if (fs.statSync(dir).isDirectory()) {
return dir;
}
throw e;
} catch (err) {
throw e;
}
}
}
};
mkdir(path.dirname(file));
// touch the file to apply flags (like w to truncate the file)
const id = fs.openSync(file, options.flags, options.mode);
fs.closeSync(id);
}
class RollingFileSync {
constructor(filename, maxLogSize, backups, options) {
debug('In RollingFileStream');
if (maxLogSize < 0) {
throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
}
this.filename = filename;
this.size = maxLogSize;
this.backups = backups;
this.options = options;
this.currentSize = 0;
function currentFileSize(file) {
let fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
touchFile(file, options);
}
return fileSize;
}
this.currentSize = currentFileSize(this.filename);
}
shouldRoll() {
debug('should roll with current size %d, and max size %d', this.currentSize, this.size);
return this.currentSize >= this.size;
}
roll(filename) {
const that = this;
const nameMatcher = new RegExp(`^${path.basename(filename)}`);
function justTheseFiles(item) {
return nameMatcher.test(item);
}
function index(filename_) {
return parseInt(filename_.substring((`${path.basename(filename)}.`).length), 10) || 0;
}
function byIndex(a, b) {
return index(a) - index(b);
}
function increaseFileIndex(fileToRename) {
const idx = index(fileToRename);
debug(`Index of ${fileToRename} is ${idx}`);
if (that.backups === 0) {
fs.truncateSync(filename, 0);
} else if (idx < that.backups) {
// on windows, you can get a EEXIST error if you rename a file to an existing file
// so, we'll try to delete the file we're renaming to first
try {
fs.unlinkSync(`${filename}.${idx + 1}`);
} catch (e) {
// ignore err: if we could not delete, it's most likely that it doesn't exist
}
debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`);
fs.renameSync(path.join(path.dirname(filename), fileToRename), `${filename}.${idx + 1}`);
}
}
function renameTheFiles() {
// roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug('Renaming the old files');
const files = fs.readdirSync(path.dirname(filename));
files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
}
debug('Rolling, rolling, rolling');
renameTheFiles();
}
// eslint-disable-next-line no-unused-vars
write(chunk, encoding) {
const that = this;
function writeTheChunk() {
debug('writing the chunk to the file');
that.currentSize += chunk.length;
fs.appendFileSync(that.filename, chunk);
}
debug('in write');
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename);
}
writeTheChunk();
}
}
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file the file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param options - options to be passed to the underlying stream
* @param timezoneOffset - optional timezone offset in minutes (default system local)
*/
function fileAppender(file, layout, logSize, numBackups, options, timezoneOffset) {
if (typeof file !== "string" || file.length === 0) {
throw new Error(`Invalid filename: ${file}`);
}
file = path.normalize(file);
numBackups = (!numBackups && numBackups !== 0) ? 5 : numBackups;
debug(
'Creating fileSync appender (',
file, ', ',
logSize, ', ',
numBackups, ', ',
options, ', ',
timezoneOffset, ')'
);
function openTheStream(filePath, fileSize, numFiles) {
let stream;
if (fileSize) {
stream = new RollingFileSync(
filePath,
fileSize,
numFiles,
options
);
} else {
stream = (((f) => {
// touch the file to apply flags (like w to truncate the file)
touchFile(f, options);
return {
write(data) {
fs.appendFileSync(f, data);
}
};
}))(filePath);
}
return stream;
}
const logFile = openTheStream(file, logSize, numBackups);
return (loggingEvent) => {
logFile.write(layout(loggingEvent, timezoneOffset) + eol);
};
}
function configure(config, layouts) {
let layout = layouts.basicLayout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
const options = {
flags: config.flags || 'a',
encoding: config.encoding || 'utf8',
mode: config.mode || 0o600
};
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
options,
config.timezoneOffset
);
}
module.exports.configure = configure;
| const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');
const os = require('os');
const eol = os.EOL;
function touchFile(file, options) {
// if the file exists, nothing to do
if (fs.existsSync(file)) {
return;
}
// attempt to create the directory
const mkdir = (dir) => {
try {
return fs.mkdirSync(dir, {recursive: true});
}
// backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option)
catch (e) {
// recursive creation of parent first
if (e.code === 'ENOENT') {
mkdir(path.dirname(dir));
return mkdir(dir);
}
// throw error for all except EEXIST and EROFS (read-only filesystem)
if (e.code !== 'EEXIST' && e.code !== 'EROFS') {
throw e;
}
// EEXIST: throw if file and not directory
// EROFS : throw if directory not found
else {
try {
if (fs.statSync(dir).isDirectory()) {
return dir;
}
throw e;
} catch (err) {
throw e;
}
}
}
};
mkdir(path.dirname(file));
// touch the file to apply flags (like w to truncate the file)
const id = fs.openSync(file, options.flags, options.mode);
fs.closeSync(id);
}
class RollingFileSync {
constructor(filename, maxLogSize, backups, options) {
debug('In RollingFileStream');
if (maxLogSize < 0) {
throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
}
this.filename = filename;
this.size = maxLogSize;
this.backups = backups;
this.options = options;
this.currentSize = 0;
function currentFileSize(file) {
let fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
touchFile(file, options);
}
return fileSize;
}
this.currentSize = currentFileSize(this.filename);
}
shouldRoll() {
debug('should roll with current size %d, and max size %d', this.currentSize, this.size);
return this.currentSize >= this.size;
}
roll(filename) {
const that = this;
const nameMatcher = new RegExp(`^${path.basename(filename)}`);
function justTheseFiles(item) {
return nameMatcher.test(item);
}
function index(filename_) {
return parseInt(filename_.slice((`${path.basename(filename)}.`).length), 10) || 0;
}
function byIndex(a, b) {
return index(a) - index(b);
}
function increaseFileIndex(fileToRename) {
const idx = index(fileToRename);
debug(`Index of ${fileToRename} is ${idx}`);
if (that.backups === 0) {
fs.truncateSync(filename, 0);
} else if (idx < that.backups) {
// on windows, you can get a EEXIST error if you rename a file to an existing file
// so, we'll try to delete the file we're renaming to first
try {
fs.unlinkSync(`${filename}.${idx + 1}`);
} catch (e) {
// ignore err: if we could not delete, it's most likely that it doesn't exist
}
debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`);
fs.renameSync(path.join(path.dirname(filename), fileToRename), `${filename}.${idx + 1}`);
}
}
function renameTheFiles() {
// roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug('Renaming the old files');
const files = fs.readdirSync(path.dirname(filename));
files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
}
debug('Rolling, rolling, rolling');
renameTheFiles();
}
// eslint-disable-next-line no-unused-vars
write(chunk, encoding) {
const that = this;
function writeTheChunk() {
debug('writing the chunk to the file');
that.currentSize += chunk.length;
fs.appendFileSync(that.filename, chunk);
}
debug('in write');
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename);
}
writeTheChunk();
}
}
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file the file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param options - options to be passed to the underlying stream
* @param timezoneOffset - optional timezone offset in minutes (default system local)
*/
function fileAppender(file, layout, logSize, numBackups, options, timezoneOffset) {
if (typeof file !== "string" || file.length === 0) {
throw new Error(`Invalid filename: ${file}`);
}
file = path.normalize(file);
numBackups = (!numBackups && numBackups !== 0) ? 5 : numBackups;
debug(
'Creating fileSync appender (',
file, ', ',
logSize, ', ',
numBackups, ', ',
options, ', ',
timezoneOffset, ')'
);
function openTheStream(filePath, fileSize, numFiles) {
let stream;
if (fileSize) {
stream = new RollingFileSync(
filePath,
fileSize,
numFiles,
options
);
} else {
stream = (((f) => {
// touch the file to apply flags (like w to truncate the file)
touchFile(f, options);
return {
write(data) {
fs.appendFileSync(f, data);
}
};
}))(filePath);
}
return stream;
}
const logFile = openTheStream(file, logSize, numBackups);
return (loggingEvent) => {
logFile.write(layout(loggingEvent, timezoneOffset) + eol);
};
}
function configure(config, layouts) {
let layout = layouts.basicLayout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
const options = {
flags: config.flags || 'a',
encoding: config.encoding || 'utf8',
mode: config.mode || 0o600
};
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
options,
config.timezoneOffset
);
}
module.exports.configure = configure;
| 1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./lib/appenders/multiprocess.js |
const debug = require('debug')('log4js:multiprocess');
const net = require('net');
const LoggingEvent = require('../LoggingEvent');
const END_MSG = '__LOG4JS__';
/**
* Creates a server, listening on config.loggerPort, config.loggerHost.
* Output goes to config.actualAppender (config.appender is used to
* set up that appender).
*/
function logServer(config, actualAppender, levels) {
/**
* Takes a utf-8 string, returns an object with
* the correct log properties.
*/
function deserializeLoggingEvent(clientSocket, msg) {
debug('(master) deserialising log event');
const loggingEvent = LoggingEvent.deserialise(msg);
loggingEvent.remoteAddress = clientSocket.remoteAddress;
loggingEvent.remotePort = clientSocket.remotePort;
return loggingEvent;
}
const server = net.createServer((clientSocket) => {
debug('(master) connection received');
clientSocket.setEncoding('utf8');
let logMessage = '';
function logTheMessage(msg) {
debug('(master) deserialising log event and sending to actual appender');
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
function chunkReceived(chunk) {
debug('(master) chunk of data received');
let event;
logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.substring(0, logMessage.indexOf(END_MSG));
logTheMessage(event);
logMessage = logMessage.substring(event.length + END_MSG.length) || '';
// check for more, maybe it was a big chunk
chunkReceived();
}
}
function handleError(error) {
const loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: levels.ERROR,
data: ['A worker log process hung up unexpectedly', error],
remoteAddress: clientSocket.remoteAddress,
remotePort: clientSocket.remotePort
};
actualAppender(loggingEvent);
}
clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
clientSocket.on('error', handleError);
});
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost', (e) => {
debug('(master) master server listening, error was ', e);
// allow the process to exit, if this is the only socket active
server.unref();
});
function app(event) {
debug('(master) log event sent directly to actual appender (local event)');
return actualAppender(event);
}
app.shutdown = function (cb) {
debug('(master) master shutdown called, closing server');
server.close(cb);
};
return app;
}
function workerAppender(config) {
let canWrite = false;
const buffer = [];
let socket;
let shutdownAttempts = 3;
function write(loggingEvent) {
debug('(worker) Writing log event to socket');
socket.write(loggingEvent.serialise(), 'utf8');
socket.write(END_MSG, 'utf8');
}
function emptyBuffer() {
let evt;
debug('(worker) emptying worker buffer');
while ((evt = buffer.shift())) {
write(evt);
}
}
function createSocket() {
debug(
`(worker) worker appender creating socket to ${config.loggerHost || 'localhost'}:${config.loggerPort || 5000}`
);
socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
socket.on('connect', () => {
debug('(worker) worker socket connected');
emptyBuffer();
canWrite = true;
});
socket.on('timeout', socket.end.bind(socket));
socket.on('error', (e) => {
debug('connection error', e);
canWrite = false;
emptyBuffer();
});
socket.on('close', createSocket);
}
createSocket();
function log(loggingEvent) {
if (canWrite) {
write(loggingEvent);
} else {
debug('(worker) worker buffering log event because it cannot write at the moment');
buffer.push(loggingEvent);
}
}
log.shutdown = function (cb) {
debug('(worker) worker shutdown called');
if (buffer.length && shutdownAttempts) {
debug('(worker) worker buffer has items, waiting 100ms to empty');
shutdownAttempts -= 1;
setTimeout(() => {
log.shutdown(cb);
}, 100);
} else {
socket.removeAllListeners('close');
socket.end(cb);
}
};
return log;
}
function createAppender(config, appender, levels) {
if (config.mode === 'master') {
debug('Creating master appender');
return logServer(config, appender, levels);
}
debug('Creating worker appender');
return workerAppender(config);
}
function configure(config, layouts, findAppender, levels) {
let appender;
debug(`configure with mode = ${config.mode}`);
if (config.mode === 'master') {
if (!config.appender) {
debug(`no appender found in config ${config}`);
throw new Error('multiprocess master must have an "appender" defined');
}
debug(`actual appender is ${config.appender}`);
appender = findAppender(config.appender);
if (!appender) {
debug(`actual appender "${config.appender}" not found`);
throw new Error(`multiprocess master appender "${config.appender}" not defined`);
}
}
return createAppender(config, appender, levels);
}
module.exports.configure = configure;
|
const debug = require('debug')('log4js:multiprocess');
const net = require('net');
const LoggingEvent = require('../LoggingEvent');
const END_MSG = '__LOG4JS__';
/**
* Creates a server, listening on config.loggerPort, config.loggerHost.
* Output goes to config.actualAppender (config.appender is used to
* set up that appender).
*/
function logServer(config, actualAppender, levels) {
/**
* Takes a utf-8 string, returns an object with
* the correct log properties.
*/
function deserializeLoggingEvent(clientSocket, msg) {
debug('(master) deserialising log event');
const loggingEvent = LoggingEvent.deserialise(msg);
loggingEvent.remoteAddress = clientSocket.remoteAddress;
loggingEvent.remotePort = clientSocket.remotePort;
return loggingEvent;
}
const server = net.createServer((clientSocket) => {
debug('(master) connection received');
clientSocket.setEncoding('utf8');
let logMessage = '';
function logTheMessage(msg) {
debug('(master) deserialising log event and sending to actual appender');
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
function chunkReceived(chunk) {
debug('(master) chunk of data received');
let event;
logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.slice(0, logMessage.indexOf(END_MSG));
logTheMessage(event);
logMessage = logMessage.slice(event.length + END_MSG.length) || '';
// check for more, maybe it was a big chunk
chunkReceived();
}
}
function handleError(error) {
const loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: levels.ERROR,
data: ['A worker log process hung up unexpectedly', error],
remoteAddress: clientSocket.remoteAddress,
remotePort: clientSocket.remotePort
};
actualAppender(loggingEvent);
}
clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
clientSocket.on('error', handleError);
});
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost', (e) => {
debug('(master) master server listening, error was ', e);
// allow the process to exit, if this is the only socket active
server.unref();
});
function app(event) {
debug('(master) log event sent directly to actual appender (local event)');
return actualAppender(event);
}
app.shutdown = function (cb) {
debug('(master) master shutdown called, closing server');
server.close(cb);
};
return app;
}
function workerAppender(config) {
let canWrite = false;
const buffer = [];
let socket;
let shutdownAttempts = 3;
function write(loggingEvent) {
debug('(worker) Writing log event to socket');
socket.write(loggingEvent.serialise(), 'utf8');
socket.write(END_MSG, 'utf8');
}
function emptyBuffer() {
let evt;
debug('(worker) emptying worker buffer');
while ((evt = buffer.shift())) {
write(evt);
}
}
function createSocket() {
debug(
`(worker) worker appender creating socket to ${config.loggerHost || 'localhost'}:${config.loggerPort || 5000}`
);
socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
socket.on('connect', () => {
debug('(worker) worker socket connected');
emptyBuffer();
canWrite = true;
});
socket.on('timeout', socket.end.bind(socket));
socket.on('error', (e) => {
debug('connection error', e);
canWrite = false;
emptyBuffer();
});
socket.on('close', createSocket);
}
createSocket();
function log(loggingEvent) {
if (canWrite) {
write(loggingEvent);
} else {
debug('(worker) worker buffering log event because it cannot write at the moment');
buffer.push(loggingEvent);
}
}
log.shutdown = function (cb) {
debug('(worker) worker shutdown called');
if (buffer.length && shutdownAttempts) {
debug('(worker) worker buffer has items, waiting 100ms to empty');
shutdownAttempts -= 1;
setTimeout(() => {
log.shutdown(cb);
}, 100);
} else {
socket.removeAllListeners('close');
socket.end(cb);
}
};
return log;
}
function createAppender(config, appender, levels) {
if (config.mode === 'master') {
debug('Creating master appender');
return logServer(config, appender, levels);
}
debug('Creating worker appender');
return workerAppender(config);
}
function configure(config, layouts, findAppender, levels) {
let appender;
debug(`configure with mode = ${config.mode}`);
if (config.mode === 'master') {
if (!config.appender) {
debug(`no appender found in config ${config}`);
throw new Error('multiprocess master must have an "appender" defined');
}
debug(`actual appender is ${config.appender}`);
appender = findAppender(config.appender);
if (!appender) {
debug(`actual appender "${config.appender}" not found`);
throw new Error(`multiprocess master appender "${config.appender}" not defined`);
}
}
return createAppender(config, appender, levels);
}
module.exports.configure = configure;
| 1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./lib/categories.js | const debug = require('debug')('log4js:categories');
const configuration = require('./configuration');
const levels = require('./levels');
const appenders = require('./appenders');
const categories = new Map();
/**
* Add inherited config to this category. That includes extra appenders from parent,
* and level, if none is set on this category.
* This is recursive, so each parent also gets loaded with inherited appenders.
* Inheritance is blocked if a category has inherit=false
* @param {*} config
* @param {*} category the child category
* @param {string} categoryName dotted path to category
* @return {void}
*/
function inheritFromParent(config, category, categoryName) {
if (category.inherit === false) return;
const lastDotIndex = categoryName.lastIndexOf('.');
if (lastDotIndex < 0) return; // category is not a child
const parentCategoryName = categoryName.substring(0, lastDotIndex);
let parentCategory = config.categories[parentCategoryName];
if (!parentCategory) {
// parent is missing, so implicitly create it, so that it can inherit from its parents
parentCategory = { inherit: true, appenders: [] };
}
// make sure parent has had its inheritance taken care of before pulling its properties to this child
inheritFromParent(config, parentCategory, parentCategoryName);
// if the parent is not in the config (because we just created it above),
// and it inherited a valid configuration, add it to config.categories
if (!config.categories[parentCategoryName]
&& parentCategory.appenders
&& parentCategory.appenders.length
&& parentCategory.level) {
config.categories[parentCategoryName] = parentCategory;
}
category.appenders = category.appenders || [];
category.level = category.level || parentCategory.level;
// merge in appenders from parent (parent is already holding its inherited appenders)
parentCategory.appenders.forEach((ap) => {
if (!category.appenders.includes(ap)) {
category.appenders.push(ap);
}
});
category.parent = parentCategory;
}
/**
* Walk all categories in the config, and pull down any configuration from parent to child.
* This includes inherited appenders, and level, where level is not set.
* Inheritance is skipped where a category has inherit=false.
* @param {*} config
*/
function addCategoryInheritance(config) {
if (!config.categories) return;
const categoryNames = Object.keys(config.categories);
categoryNames.forEach((name) => {
const category = config.categories[name];
// add inherited appenders and level to this category
inheritFromParent(config, category, name);
});
}
configuration.addPreProcessingListener(config => addCategoryInheritance(config));
configuration.addListener((config) => {
configuration.throwExceptionIf(
config,
configuration.not(configuration.anObject(config.categories)),
'must have a property "categories" of type object.'
);
const categoryNames = Object.keys(config.categories);
configuration.throwExceptionIf(
config,
configuration.not(categoryNames.length),
'must define at least one category.'
);
categoryNames.forEach((name) => {
const category = config.categories[name];
configuration.throwExceptionIf(
config,
[
configuration.not(category.appenders),
configuration.not(category.level)
],
`category "${name}" is not valid (must be an object with properties "appenders" and "level")`
);
configuration.throwExceptionIf(
config,
configuration.not(Array.isArray(category.appenders)),
`category "${name}" is not valid (appenders must be an array of appender names)`
);
configuration.throwExceptionIf(
config,
configuration.not(category.appenders.length),
`category "${name}" is not valid (appenders must contain at least one appender name)`
);
if (Object.prototype.hasOwnProperty.call(category, 'enableCallStack')) {
configuration.throwExceptionIf(
config,
typeof category.enableCallStack !== 'boolean',
`category "${name}" is not valid (enableCallStack must be boolean type)`
);
}
category.appenders.forEach((appender) => {
configuration.throwExceptionIf(
config,
configuration.not(appenders.get(appender)),
`category "${name}" is not valid (appender "${appender}" is not defined)`
);
});
configuration.throwExceptionIf(
config,
configuration.not(levels.getLevel(category.level)),
`category "${name}" is not valid (level "${category.level}" not recognised;`
+ ` valid levels are ${levels.levels.join(', ')})`
);
});
configuration.throwExceptionIf(
config,
configuration.not(config.categories.default),
'must define a "default" category.'
);
});
const setup = (config) => {
categories.clear();
if (!config) {
return;
}
const categoryNames = Object.keys(config.categories);
categoryNames.forEach((name) => {
const category = config.categories[name];
const categoryAppenders = [];
category.appenders.forEach((appender) => {
categoryAppenders.push(appenders.get(appender));
debug(`Creating category ${name}`);
categories.set(
name,
{
appenders: categoryAppenders,
level: levels.getLevel(category.level),
enableCallStack: category.enableCallStack || false
}
);
});
});
};
const init = () => {
setup();
};
init();
configuration.addListener(setup);
const configForCategory = (category) => {
debug(`configForCategory: searching for config for ${category}`);
if (categories.has(category)) {
debug(`configForCategory: ${category} exists in config, returning it`);
return categories.get(category);
}
let sourceCategoryConfig;
if (category.indexOf('.') > 0) {
debug(`configForCategory: ${category} has hierarchy, cloning from parents`);
sourceCategoryConfig = { ...configForCategory(category.substring(0, category.lastIndexOf('.'))) };
} else {
if (!categories.has('default')) {
setup({ categories: { default: { appenders: ['out'], level: 'OFF' } } });
}
debug('configForCategory: cloning default category');
sourceCategoryConfig = { ...categories.get('default') };
}
categories.set(category, sourceCategoryConfig);
return sourceCategoryConfig;
};
const appendersForCategory = category => configForCategory(category).appenders;
const getLevelForCategory = category => configForCategory(category).level;
const setLevelForCategory = (category, level) => {
configForCategory(category).level = level;
};
const getEnableCallStackForCategory = category => configForCategory(category).enableCallStack === true;
const setEnableCallStackForCategory = (category, useCallStack) => {
configForCategory(category).enableCallStack = useCallStack;
};
module.exports = categories;
module.exports = Object.assign(module.exports, {
appendersForCategory,
getLevelForCategory,
setLevelForCategory,
getEnableCallStackForCategory,
setEnableCallStackForCategory,
init,
});
| const debug = require('debug')('log4js:categories');
const configuration = require('./configuration');
const levels = require('./levels');
const appenders = require('./appenders');
const categories = new Map();
/**
* Add inherited config to this category. That includes extra appenders from parent,
* and level, if none is set on this category.
* This is recursive, so each parent also gets loaded with inherited appenders.
* Inheritance is blocked if a category has inherit=false
* @param {*} config
* @param {*} category the child category
* @param {string} categoryName dotted path to category
* @return {void}
*/
function inheritFromParent(config, category, categoryName) {
if (category.inherit === false) return;
const lastDotIndex = categoryName.lastIndexOf('.');
if (lastDotIndex < 0) return; // category is not a child
const parentCategoryName = categoryName.slice(0, lastDotIndex);
let parentCategory = config.categories[parentCategoryName];
if (!parentCategory) {
// parent is missing, so implicitly create it, so that it can inherit from its parents
parentCategory = { inherit: true, appenders: [] };
}
// make sure parent has had its inheritance taken care of before pulling its properties to this child
inheritFromParent(config, parentCategory, parentCategoryName);
// if the parent is not in the config (because we just created it above),
// and it inherited a valid configuration, add it to config.categories
if (!config.categories[parentCategoryName]
&& parentCategory.appenders
&& parentCategory.appenders.length
&& parentCategory.level) {
config.categories[parentCategoryName] = parentCategory;
}
category.appenders = category.appenders || [];
category.level = category.level || parentCategory.level;
// merge in appenders from parent (parent is already holding its inherited appenders)
parentCategory.appenders.forEach((ap) => {
if (!category.appenders.includes(ap)) {
category.appenders.push(ap);
}
});
category.parent = parentCategory;
}
/**
* Walk all categories in the config, and pull down any configuration from parent to child.
* This includes inherited appenders, and level, where level is not set.
* Inheritance is skipped where a category has inherit=false.
* @param {*} config
*/
function addCategoryInheritance(config) {
if (!config.categories) return;
const categoryNames = Object.keys(config.categories);
categoryNames.forEach((name) => {
const category = config.categories[name];
// add inherited appenders and level to this category
inheritFromParent(config, category, name);
});
}
configuration.addPreProcessingListener(config => addCategoryInheritance(config));
configuration.addListener((config) => {
configuration.throwExceptionIf(
config,
configuration.not(configuration.anObject(config.categories)),
'must have a property "categories" of type object.'
);
const categoryNames = Object.keys(config.categories);
configuration.throwExceptionIf(
config,
configuration.not(categoryNames.length),
'must define at least one category.'
);
categoryNames.forEach((name) => {
const category = config.categories[name];
configuration.throwExceptionIf(
config,
[
configuration.not(category.appenders),
configuration.not(category.level)
],
`category "${name}" is not valid (must be an object with properties "appenders" and "level")`
);
configuration.throwExceptionIf(
config,
configuration.not(Array.isArray(category.appenders)),
`category "${name}" is not valid (appenders must be an array of appender names)`
);
configuration.throwExceptionIf(
config,
configuration.not(category.appenders.length),
`category "${name}" is not valid (appenders must contain at least one appender name)`
);
if (Object.prototype.hasOwnProperty.call(category, 'enableCallStack')) {
configuration.throwExceptionIf(
config,
typeof category.enableCallStack !== 'boolean',
`category "${name}" is not valid (enableCallStack must be boolean type)`
);
}
category.appenders.forEach((appender) => {
configuration.throwExceptionIf(
config,
configuration.not(appenders.get(appender)),
`category "${name}" is not valid (appender "${appender}" is not defined)`
);
});
configuration.throwExceptionIf(
config,
configuration.not(levels.getLevel(category.level)),
`category "${name}" is not valid (level "${category.level}" not recognised;`
+ ` valid levels are ${levels.levels.join(', ')})`
);
});
configuration.throwExceptionIf(
config,
configuration.not(config.categories.default),
'must define a "default" category.'
);
});
const setup = (config) => {
categories.clear();
if (!config) {
return;
}
const categoryNames = Object.keys(config.categories);
categoryNames.forEach((name) => {
const category = config.categories[name];
const categoryAppenders = [];
category.appenders.forEach((appender) => {
categoryAppenders.push(appenders.get(appender));
debug(`Creating category ${name}`);
categories.set(
name,
{
appenders: categoryAppenders,
level: levels.getLevel(category.level),
enableCallStack: category.enableCallStack || false
}
);
});
});
};
const init = () => {
setup();
};
init();
configuration.addListener(setup);
const configForCategory = (category) => {
debug(`configForCategory: searching for config for ${category}`);
if (categories.has(category)) {
debug(`configForCategory: ${category} exists in config, returning it`);
return categories.get(category);
}
let sourceCategoryConfig;
if (category.indexOf('.') > 0) {
debug(`configForCategory: ${category} has hierarchy, cloning from parents`);
sourceCategoryConfig = { ...configForCategory(category.slice(0, category.lastIndexOf('.'))) };
} else {
if (!categories.has('default')) {
setup({ categories: { default: { appenders: ['out'], level: 'OFF' } } });
}
debug('configForCategory: cloning default category');
sourceCategoryConfig = { ...categories.get('default') };
}
categories.set(category, sourceCategoryConfig);
return sourceCategoryConfig;
};
const appendersForCategory = category => configForCategory(category).appenders;
const getLevelForCategory = category => configForCategory(category).level;
const setLevelForCategory = (category, level) => {
configForCategory(category).level = level;
};
const getEnableCallStackForCategory = category => configForCategory(category).enableCallStack === true;
const setEnableCallStackForCategory = (category, useCallStack) => {
configForCategory(category).enableCallStack = useCallStack;
};
module.exports = categories;
module.exports = Object.assign(module.exports, {
appendersForCategory,
getLevelForCategory,
setLevelForCategory,
getEnableCallStackForCategory,
setEnableCallStackForCategory,
init,
});
| 1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./lib/layouts.js | const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');
const debug = require('debug')('log4js:layouts');
const styles = {
// styles
bold: [1, 22],
italic: [3, 23],
underline: [4, 24],
inverse: [7, 27],
// grayscale
white: [37, 39],
grey: [90, 39],
black: [90, 39],
// colors
blue: [34, 39],
cyan: [36, 39],
green: [32, 39],
magenta: [35, 39],
red: [91, 39],
yellow: [33, 39]
};
function colorizeStart(style) {
return style ? `\x1B[${styles[style][0]}m` : '';
}
function colorizeEnd(style) {
return style ? `\x1B[${styles[style][1]}m` : '';
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize(str, style) {
return colorizeStart(style) + str + colorizeEnd(style);
}
function timestampLevelAndCategory(loggingEvent, colour) {
return colorize(
util.format(
'[%s] [%s] %s - ',
dateFormat.asString(loggingEvent.startTime),
loggingEvent.level.toString(),
loggingEvent.categoryName
),
colour
);
}
/**
* BasicLayout is a simple layout for storing the logs. The logs are stored
* in following format:
* <pre>
* [startTime] [logLevel] categoryName - message\n
* </pre>
*
* @author Stephan Strittmatter
*/
function basicLayout(loggingEvent) {
return timestampLevelAndCategory(loggingEvent) + util.format(...loggingEvent.data);
}
/**
* colouredLayout - taken from masylum's fork.
* same as basicLayout, but with colours.
*/
function colouredLayout(loggingEvent) {
return timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) + util.format(...loggingEvent.data);
}
function messagePassThroughLayout(loggingEvent) {
return util.format(...loggingEvent.data);
}
function dummyLayout(loggingEvent) {
return loggingEvent.data[0];
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* both padding and truncation can be negative.
* Negative truncation = trunc from end of string
* Positive truncation = trunc from start of string
* Negative padding = pad right
* Positive padding = pad left
*
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
* - %d date in constious formats
* - %% %
* - %n newline
* - %z pid
* - %f filename
* - %l line number
* - %o column postion
* - %s call stack
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* - %X{<tokenname>} add dynamic tokens to your log. Tokens are specified in logger context
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { 'pid' : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @return {Function}
* @param pattern
* @param tokens
* @param timezoneOffset
*
* @authors ['Stephan Strittmatter', 'Jan Schmidle']
*/
function patternLayout(pattern, tokens) {
const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n';
const regex = /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflos%])(\{([^}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
function categoryName(loggingEvent, specifier) {
let loggerName = loggingEvent.categoryName;
if (specifier) {
const precision = parseInt(specifier, 10);
const loggerNameBits = loggerName.split('.');
if (precision < loggerNameBits.length) {
loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join('.');
}
}
return loggerName;
}
function formatAsDate(loggingEvent, specifier) {
let format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
switch (format) {
case 'ISO8601':
case 'ISO8601_FORMAT':
format = dateFormat.ISO8601_FORMAT;
break;
case 'ISO8601_WITH_TZ_OFFSET':
case 'ISO8601_WITH_TZ_OFFSET_FORMAT':
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
break;
case 'ABSOLUTE':
process.emitWarning(
"Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. " +
"Please use %d{ABSOLUTETIME} instead.",
"DeprecationWarning", "log4js-node-DEP0003"
);
debug("[log4js-node-DEP0003]",
"DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.");
// falls through
case 'ABSOLUTETIME':
case 'ABSOLUTETIME_FORMAT':
format = dateFormat.ABSOLUTETIME_FORMAT;
break;
case 'DATE':
process.emitWarning(
"Pattern %d{DATE} is deprecated due to the confusion it causes when used. " +
"Please use %d{DATETIME} instead.",
"DeprecationWarning", "log4js-node-DEP0004"
);
debug("[log4js-node-DEP0004]",
"DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.");
// falls through
case 'DATETIME':
case 'DATETIME_FORMAT':
format = dateFormat.DATETIME_FORMAT;
break;
// no default
}
}
// Format the date
return dateFormat.asString(format, loggingEvent.startTime);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return util.format(...loggingEvent.data);
}
function endOfLine() {
return os.EOL;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime);
}
function startColour(loggingEvent) {
return colorizeStart(loggingEvent.level.colour);
}
function endColour(loggingEvent) {
return colorizeEnd(loggingEvent.level.colour);
}
function percent() {
return '%';
}
function pid(loggingEvent) {
return loggingEvent && loggingEvent.pid ? loggingEvent.pid.toString() : process.pid.toString();
}
function clusterInfo() {
// this used to try to return the master and worker pids,
// but it would never have worked because master pid is not available to workers
// leaving this here to maintain compatibility for patterns
return pid();
}
function userDefined(loggingEvent, specifier) {
if (typeof tokens[specifier] !== 'undefined') {
return typeof tokens[specifier] === 'function' ? tokens[specifier](loggingEvent) : tokens[specifier];
}
return null;
}
function contextDefined(loggingEvent, specifier) {
const resolver = loggingEvent.context[specifier];
if (typeof resolver !== 'undefined') {
return typeof resolver === 'function' ? resolver(loggingEvent) : resolver;
}
return null;
}
function fileName(loggingEvent, specifier) {
let filename = loggingEvent.fileName || '';
if (specifier) {
const fileDepth = parseInt(specifier, 10);
const fileList = filename.split(path.sep);
if (fileList.length > fileDepth) {
filename = fileList.slice(-fileDepth).join(path.sep);
}
}
return filename;
}
function lineNumber(loggingEvent) {
return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : '';
}
function columnNumber(loggingEvent) {
return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : '';
}
function callStack(loggingEvent) {
return loggingEvent.callStack || '';
}
const replacers = {
c: categoryName,
d: formatAsDate,
h: hostname,
m: formatMessage,
n: endOfLine,
p: logLevel,
r: startTime,
'[': startColour,
']': endColour,
y: clusterInfo,
z: pid,
'%': percent,
x: userDefined,
X: contextDefined,
f: fileName,
l: lineNumber,
o: columnNumber,
s: callStack
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
let len;
if (truncation) {
len = parseInt(truncation.substr(1), 10);
// negative truncate length means truncate from end of string
return len > 0 ? toTruncate.slice(0, len) : toTruncate.slice(len);
}
return toTruncate;
}
function pad(padding, toPad) {
let len;
if (padding) {
if (padding.charAt(0) === '-') {
len = parseInt(padding.substr(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += ' ';
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = ` ${toPad}`;
}
}
}
return toPad;
}
function truncateAndPad(toTruncAndPad, truncation, padding) {
let replacement = toTruncAndPad;
replacement = truncate(truncation, replacement);
replacement = pad(padding, replacement);
return replacement;
}
return function (loggingEvent) {
let formattedString = '';
let result;
let searchString = pattern;
while ((result = regex.exec(searchString)) !== null) {
// const matchedString = result[0];
const padding = result[1];
const truncation = result[2];
const conversionCharacter = result[3];
const specifier = result[5];
const text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += text.toString();
} else {
// Create a raw replacement string based on the conversion
// character and specifier
const replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
formattedString += truncateAndPad(replacement, truncation, padding);
}
searchString = searchString.substr(result.index + result[0].length);
}
return formattedString;
};
}
const layoutMakers = {
messagePassThrough () {
return messagePassThroughLayout;
},
basic () {
return basicLayout;
},
colored () {
return colouredLayout;
},
coloured () {
return colouredLayout;
},
pattern (config) {
return patternLayout(config && config.pattern, config && config.tokens);
},
dummy () {
return dummyLayout;
}
};
module.exports = {
basicLayout,
messagePassThroughLayout,
patternLayout,
colouredLayout,
coloredLayout: colouredLayout,
dummyLayout,
addLayout (name, serializerGenerator) {
layoutMakers[name] = serializerGenerator;
},
layout (name, config) {
return layoutMakers[name] && layoutMakers[name](config);
}
};
| const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');
const debug = require('debug')('log4js:layouts');
const styles = {
// styles
bold: [1, 22],
italic: [3, 23],
underline: [4, 24],
inverse: [7, 27],
// grayscale
white: [37, 39],
grey: [90, 39],
black: [90, 39],
// colors
blue: [34, 39],
cyan: [36, 39],
green: [32, 39],
magenta: [35, 39],
red: [91, 39],
yellow: [33, 39]
};
function colorizeStart(style) {
return style ? `\x1B[${styles[style][0]}m` : '';
}
function colorizeEnd(style) {
return style ? `\x1B[${styles[style][1]}m` : '';
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize(str, style) {
return colorizeStart(style) + str + colorizeEnd(style);
}
function timestampLevelAndCategory(loggingEvent, colour) {
return colorize(
util.format(
'[%s] [%s] %s - ',
dateFormat.asString(loggingEvent.startTime),
loggingEvent.level.toString(),
loggingEvent.categoryName
),
colour
);
}
/**
* BasicLayout is a simple layout for storing the logs. The logs are stored
* in following format:
* <pre>
* [startTime] [logLevel] categoryName - message\n
* </pre>
*
* @author Stephan Strittmatter
*/
function basicLayout(loggingEvent) {
return timestampLevelAndCategory(loggingEvent) + util.format(...loggingEvent.data);
}
/**
* colouredLayout - taken from masylum's fork.
* same as basicLayout, but with colours.
*/
function colouredLayout(loggingEvent) {
return timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) + util.format(...loggingEvent.data);
}
function messagePassThroughLayout(loggingEvent) {
return util.format(...loggingEvent.data);
}
function dummyLayout(loggingEvent) {
return loggingEvent.data[0];
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* both padding and truncation can be negative.
* Negative truncation = trunc from end of string
* Positive truncation = trunc from start of string
* Negative padding = pad right
* Positive padding = pad left
*
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
* - %d date in constious formats
* - %% %
* - %n newline
* - %z pid
* - %f filename
* - %l line number
* - %o column postion
* - %s call stack
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* - %X{<tokenname>} add dynamic tokens to your log. Tokens are specified in logger context
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { 'pid' : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @return {Function}
* @param pattern
* @param tokens
* @param timezoneOffset
*
* @authors ['Stephan Strittmatter', 'Jan Schmidle']
*/
function patternLayout(pattern, tokens) {
const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n';
const regex = /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflos%])(\{([^}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
function categoryName(loggingEvent, specifier) {
let loggerName = loggingEvent.categoryName;
if (specifier) {
const precision = parseInt(specifier, 10);
const loggerNameBits = loggerName.split('.');
if (precision < loggerNameBits.length) {
loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join('.');
}
}
return loggerName;
}
function formatAsDate(loggingEvent, specifier) {
let format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
switch (format) {
case 'ISO8601':
case 'ISO8601_FORMAT':
format = dateFormat.ISO8601_FORMAT;
break;
case 'ISO8601_WITH_TZ_OFFSET':
case 'ISO8601_WITH_TZ_OFFSET_FORMAT':
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
break;
case 'ABSOLUTE':
process.emitWarning(
"Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. " +
"Please use %d{ABSOLUTETIME} instead.",
"DeprecationWarning", "log4js-node-DEP0003"
);
debug("[log4js-node-DEP0003]",
"DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.");
// falls through
case 'ABSOLUTETIME':
case 'ABSOLUTETIME_FORMAT':
format = dateFormat.ABSOLUTETIME_FORMAT;
break;
case 'DATE':
process.emitWarning(
"Pattern %d{DATE} is deprecated due to the confusion it causes when used. " +
"Please use %d{DATETIME} instead.",
"DeprecationWarning", "log4js-node-DEP0004"
);
debug("[log4js-node-DEP0004]",
"DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.");
// falls through
case 'DATETIME':
case 'DATETIME_FORMAT':
format = dateFormat.DATETIME_FORMAT;
break;
// no default
}
}
// Format the date
return dateFormat.asString(format, loggingEvent.startTime);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return util.format(...loggingEvent.data);
}
function endOfLine() {
return os.EOL;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime);
}
function startColour(loggingEvent) {
return colorizeStart(loggingEvent.level.colour);
}
function endColour(loggingEvent) {
return colorizeEnd(loggingEvent.level.colour);
}
function percent() {
return '%';
}
function pid(loggingEvent) {
return loggingEvent && loggingEvent.pid ? loggingEvent.pid.toString() : process.pid.toString();
}
function clusterInfo() {
// this used to try to return the master and worker pids,
// but it would never have worked because master pid is not available to workers
// leaving this here to maintain compatibility for patterns
return pid();
}
function userDefined(loggingEvent, specifier) {
if (typeof tokens[specifier] !== 'undefined') {
return typeof tokens[specifier] === 'function' ? tokens[specifier](loggingEvent) : tokens[specifier];
}
return null;
}
function contextDefined(loggingEvent, specifier) {
const resolver = loggingEvent.context[specifier];
if (typeof resolver !== 'undefined') {
return typeof resolver === 'function' ? resolver(loggingEvent) : resolver;
}
return null;
}
function fileName(loggingEvent, specifier) {
let filename = loggingEvent.fileName || '';
if (specifier) {
const fileDepth = parseInt(specifier, 10);
const fileList = filename.split(path.sep);
if (fileList.length > fileDepth) {
filename = fileList.slice(-fileDepth).join(path.sep);
}
}
return filename;
}
function lineNumber(loggingEvent) {
return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : '';
}
function columnNumber(loggingEvent) {
return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : '';
}
function callStack(loggingEvent) {
return loggingEvent.callStack || '';
}
const replacers = {
c: categoryName,
d: formatAsDate,
h: hostname,
m: formatMessage,
n: endOfLine,
p: logLevel,
r: startTime,
'[': startColour,
']': endColour,
y: clusterInfo,
z: pid,
'%': percent,
x: userDefined,
X: contextDefined,
f: fileName,
l: lineNumber,
o: columnNumber,
s: callStack
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
let len;
if (truncation) {
len = parseInt(truncation.slice(1), 10);
// negative truncate length means truncate from end of string
return len > 0 ? toTruncate.slice(0, len) : toTruncate.slice(len);
}
return toTruncate;
}
function pad(padding, toPad) {
let len;
if (padding) {
if (padding.charAt(0) === '-') {
len = parseInt(padding.slice(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += ' ';
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = ` ${toPad}`;
}
}
}
return toPad;
}
function truncateAndPad(toTruncAndPad, truncation, padding) {
let replacement = toTruncAndPad;
replacement = truncate(truncation, replacement);
replacement = pad(padding, replacement);
return replacement;
}
return function (loggingEvent) {
let formattedString = '';
let result;
let searchString = pattern;
while ((result = regex.exec(searchString)) !== null) {
// const matchedString = result[0];
const padding = result[1];
const truncation = result[2];
const conversionCharacter = result[3];
const specifier = result[5];
const text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += text.toString();
} else {
// Create a raw replacement string based on the conversion
// character and specifier
const replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
formattedString += truncateAndPad(replacement, truncation, padding);
}
searchString = searchString.slice(result.index + result[0].length);
}
return formattedString;
};
}
const layoutMakers = {
messagePassThrough () {
return messagePassThroughLayout;
},
basic () {
return basicLayout;
},
colored () {
return colouredLayout;
},
coloured () {
return colouredLayout;
},
pattern (config) {
return patternLayout(config && config.pattern, config && config.tokens);
},
dummy () {
return dummyLayout;
}
};
module.exports = {
basicLayout,
messagePassThroughLayout,
patternLayout,
colouredLayout,
coloredLayout: colouredLayout,
dummyLayout,
addLayout (name, serializerGenerator) {
layoutMakers[name] = serializerGenerator;
},
layout (name, config) {
return layoutMakers[name] && layoutMakers[name](config);
}
};
| 1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/tap/layouts-test.js | const { test } = require("tap");
const debug = require("debug");
const os = require("os");
const path = require("path");
const { EOL } = os;
// used for patternLayout tests.
function testPattern(assert, layout, event, tokens, pattern, value) {
assert.equal(layout(pattern, tokens)(event), value);
}
test("log4js layouts", batch => {
batch.test("colouredLayout", t => {
const layout = require("../../lib/layouts").colouredLayout;
t.test("should apply level colour codes to output", assert => {
const output = layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString() {
return "ERROR";
},
colour: "red"
}
});
assert.equal(
output,
"\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense"
);
assert.end();
});
t.test("should support the console.log format for the message", assert => {
const output = layout({
data: ["thing %d", 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString() {
return "ERROR";
},
colour: "red"
}
});
assert.equal(
output,
"\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2"
);
assert.end();
});
t.end();
});
batch.test("messagePassThroughLayout", t => {
const layout = require("../../lib/layouts").messagePassThroughLayout;
t.equal(
layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"nonsense",
"should take a logevent and output only the message"
);
t.equal(
layout({
data: ["thing %d", 1, "cheese"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"thing 1 cheese",
"should support the console.log format for the message"
);
t.equal(
layout({
data: [{ thing: 1 }],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"{ thing: 1 }",
"should output the first item even if it is not a string"
);
t.match(
layout({
data: [new Error()],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
/at (Test\.batch\.test\.t|Test\.<anonymous>)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
"regexp did not return a match - should print the stacks of a passed error objects"
);
t.test("with passed augmented errors", assert => {
const e = new Error("My Unique Error Message");
e.augmented = "My Unique attribute value";
e.augObj = { at1: "at2" };
const layoutOutput = layout({
data: [e],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
});
assert.match(
layoutOutput,
/Error: My Unique Error Message/,
"should print the contained error message"
);
assert.match(
layoutOutput,
/augmented:\s'My Unique attribute value'/,
"should print error augmented string attributes"
);
assert.match(
layoutOutput,
/augObj:\s\{ at1: 'at2' \}/,
"should print error augmented object attributes"
);
assert.end();
});
t.end();
});
batch.test("basicLayout", t => {
const layout = require("../../lib/layouts").basicLayout;
const event = {
data: ["this is a test"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "tests",
level: {
toString() {
return "DEBUG";
}
}
};
t.equal(
layout(event),
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test"
);
t.test(
"should output a stacktrace, message if the event has an error attached",
assert => {
let i;
const error = new Error("Some made-up error");
const stack = error.stack.split(/\n/);
event.data = ["this is a test", error];
const output = layout(event);
const lines = output.split(/\n/);
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error"
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
}
assert.end();
}
);
t.test(
"should output any extra data in the log event as util.inspect strings",
assert => {
event.data = [
"this is a test",
{
name: "Cheese",
message: "Gorgonzola smells."
}
];
const output = layout(event);
assert.equal(
output,
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test " +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
}
);
t.end();
});
batch.test("dummyLayout", t => {
const layout = require("../../lib/layouts").dummyLayout;
t.test("should output just the first element of the log data", assert => {
const event = {
data: ["this is the first value", "this is not"],
startTime: new Date("2010-12-05 14:18:30.045"),
categoryName: "multiple.levels.of.tests",
level: {
toString() {
return "DEBUG";
},
colour: "cyan"
}
};
assert.equal(layout(event), "this is the first value");
assert.end();
});
t.end();
});
batch.test("patternLayout", t => {
const originalListener = process.listeners("warning")[process.listeners("warning").length - 1];
const warningListener = error => {
if (error.name === "DeprecationWarning") {
if (error.code.startsWith("log4js-node-DEP0003") || error.code.startsWith("log4js-node-DEP0004")) {
return;
}
}
originalListener(error);
};
process.off("warning", originalListener);
process.on("warning", warningListener);
const debugWasEnabled = debug.enabled("log4js:layouts");
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
const originalNamespace = debug.disable();
debug.enable(`${originalNamespace}, log4js:layouts`);
batch.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off("warning", warningListener);
process.on("warning", originalListener);
});
process.stderr.write = originalWrite;
debug.enable(originalNamespace);
});
const tokens = {
testString: "testStringToken",
testFunction() {
return "testFunctionToken";
},
fnThatUsesLogEvent(logEvent) {
return logEvent.level.toString();
}
};
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
" at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)"; // eslint-disable-line max-len
const fileName = path.normalize("/log4js-node/test/tap/layouts-test.js");
const lineNumber = 1;
const columnNumber = 14;
const event = {
data: ["this is a test"],
startTime: new Date("2010-12-05 14:18:30.045"),
categoryName: "multiple.levels.of.tests",
level: {
toString() {
return "DEBUG";
},
colour: "cyan"
},
context: tokens,
// location
callStack,
fileName,
lineNumber,
columnNumber
};
event.startTime.getTimezoneOffset = () => -600;
const layout = require("../../lib/layouts").patternLayout;
t.test('should default to "time logLevel loggerName - message"', assert => {
testPattern(
assert,
layout,
event,
tokens,
null,
`14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
);
assert.end();
});
t.test("%r should output time only", assert => {
testPattern(assert, layout, event, tokens, "%r", "14:18:30");
assert.end();
});
t.test("%p should output the log level", assert => {
testPattern(assert, layout, event, tokens, "%p", "DEBUG");
assert.end();
});
t.test("%c should output the log category", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%c",
"multiple.levels.of.tests"
);
assert.end();
});
t.test("%m should output the log data", assert => {
testPattern(assert, layout, event, tokens, "%m", "this is a test");
assert.end();
});
t.test("%n should output a new line", assert => {
testPattern(assert, layout, event, tokens, "%n", EOL);
assert.end();
});
t.test("%h should output hostname", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%h",
os.hostname().toString()
);
assert.end();
});
t.test("%z should output pid", assert => {
testPattern(assert, layout, event, tokens, "%z", process.pid.toString());
assert.end();
});
t.test("%z should pick up pid from log event if present", assert => {
event.pid = "1234";
testPattern(assert, layout, event, tokens, "%z", "1234");
delete event.pid;
assert.end();
});
t.test("%y should output pid (was cluster info)", assert => {
testPattern(assert, layout, event, tokens, "%y", process.pid.toString());
assert.end();
});
t.test(
"%c should handle category names like java-style package names",
assert => {
testPattern(assert, layout, event, tokens, "%c{1}", "tests");
testPattern(assert, layout, event, tokens, "%c{2}", "of.tests");
testPattern(assert, layout, event, tokens, "%c{3}", "levels.of.tests");
testPattern(
assert,
layout,
event,
tokens,
"%c{4}",
"multiple.levels.of.tests"
);
testPattern(
assert,
layout,
event,
tokens,
"%c{5}",
"multiple.levels.of.tests"
);
testPattern(
assert,
layout,
event,
tokens,
"%c{99}",
"multiple.levels.of.tests"
);
assert.end();
}
);
t.test("%d should output the date in ISO8601 format", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%d",
"2010-12-05T14:18:30.045"
);
assert.end();
});
t.test("%d should allow for format specification", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%d{ISO8601}",
"2010-12-05T14:18:30.045"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{ISO8601_WITH_TZ_OFFSET}",
"2010-12-05T14:18:30.045+10:00"
);
const DEP0003 = debugLogs.filter((e) => e.indexOf("log4js-node-DEP0003") > -1).length;
testPattern(
assert,
layout,
event,
tokens,
"%d{ABSOLUTE}", // deprecated
"14:18:30.045"
);
assert.equal(
debugLogs.filter((e) => e.indexOf("log4js-node-DEP0003") > -1).length,
DEP0003 + 1,
"deprecation log4js-node-DEP0003 emitted"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{ABSOLUTETIME}",
"14:18:30.045"
);
const DEP0004 = debugLogs.filter((e) => e.indexOf("log4js-node-DEP0004") > -1).length;
testPattern(
assert,
layout,
event,
tokens,
"%d{DATE}", // deprecated
"05 12 2010 14:18:30.045"
);
assert.equal(
debugLogs.filter((e) => e.indexOf("log4js-node-DEP0004") > -1).length,
DEP0004 + 1,
"deprecation log4js-node-DEP0004 emitted"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{DATETIME}",
"05 12 2010 14:18:30.045"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yy MM dd hh mm ss}",
"10 12 05 14 18 30"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yyyy MM dd}",
"2010 12 05"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yyyy MM dd hh mm ss SSS}",
"2010 12 05 14 18 30 045"
);
assert.end();
});
t.test("%% should output %", assert => {
testPattern(assert, layout, event, tokens, "%%", "%");
assert.end();
});
t.test("%f should output filename", assert => {
testPattern(assert, layout, event, tokens, "%f", fileName);
assert.end();
});
t.test("%f should handle filename depth", assert => {
testPattern(assert, layout, event, tokens, "%f{1}", "layouts-test.js");
testPattern(
assert,
layout,
event,
tokens,
"%f{2}",
path.join("tap", "layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{3}",
path.join("test", "tap", "layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{4}",
path.join("log4js-node","test","tap","layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{5}",
path.join("/log4js-node","test","tap","layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{99}",
path.join("/log4js-node","test","tap","layouts-test.js")
);
assert.end();
});
t.test("%f should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%.5f", fileName.substring(0, 5));
testPattern(assert, layout, event, tokens, "%20f{1}", " layouts-test.js");
testPattern(assert, layout, event, tokens, "%30.30f{2}", ` ${ path.join("tap","layouts-test.js")}`);
testPattern(assert, layout, event, tokens, "%10.-5f{1}", " st.js");
assert.end();
});
t.test("%l should output line number", assert => {
testPattern(assert, layout, event, tokens, "%l", lineNumber.toString());
assert.end();
});
t.test("%l should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%5.10l", " 1");
testPattern(assert, layout, event, tokens, "%.5l", "1");
testPattern(assert, layout, event, tokens, "%.-5l", "1");
testPattern(assert, layout, event, tokens, "%-5l", "1 ");
assert.end();
});
t.test("%o should output column postion", assert => {
testPattern(assert, layout, event, tokens, "%o", columnNumber.toString());
assert.end();
});
t.test("%o should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%5.10o", " 14");
testPattern(assert, layout, event, tokens, "%.5o", "14");
testPattern(assert, layout, event, tokens, "%.1o", "1");
testPattern(assert, layout, event, tokens, "%.-1o", "4");
testPattern(assert, layout, event, tokens, "%-5o", "14 ");
assert.end();
});
t.test("%s should output stack", assert => {
testPattern(assert, layout, event, tokens, "%s", callStack);
assert.end();
});
t.test("%f should output empty string when fileName not exist", assert => {
delete event.fileName;
testPattern(assert, layout, event, tokens, "%f", "");
assert.end();
});
t.test(
"%l should output empty string when lineNumber not exist",
assert => {
delete event.lineNumber;
testPattern(assert, layout, event, tokens, "%l", "");
assert.end();
}
);
t.test(
"%o should output empty string when columnNumber not exist",
assert => {
delete event.columnNumber;
testPattern(assert, layout, event, tokens, "%o", "");
assert.end();
}
);
t.test("%s should output empty string when callStack not exist", assert => {
delete event.callStack;
testPattern(assert, layout, event, tokens, "%s", "");
assert.end();
});
t.test("should output anything not preceded by % as literal", assert => {
testPattern(
assert,
layout,
event,
tokens,
"blah blah blah",
"blah blah blah"
);
assert.end();
});
t.test(
"should output the original string if no replacer matches the token",
assert => {
testPattern(assert, layout, event, tokens, "%a{3}", "a{3}");
assert.end();
}
);
t.test("should handle complicated patterns", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%m%n %c{2} at %d{ABSOLUTE} cheese %p%n", // deprecated
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
testPattern(
assert,
layout,
event,
tokens,
"%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n",
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
assert.end();
});
t.test("should truncate fields if specified", assert => {
testPattern(assert, layout, event, tokens, "%.4m", "this");
testPattern(assert, layout, event, tokens, "%.7m", "this is");
testPattern(assert, layout, event, tokens, "%.9m", "this is a");
testPattern(assert, layout, event, tokens, "%.14m", "this is a test");
testPattern(
assert,
layout,
event,
tokens,
"%.2919102m",
"this is a test"
);
testPattern(assert, layout, event, tokens, "%.-4m", "test");
assert.end();
});
t.test("should pad fields if specified", assert => {
testPattern(assert, layout, event, tokens, "%10p", " DEBUG");
testPattern(assert, layout, event, tokens, "%8p", " DEBUG");
testPattern(assert, layout, event, tokens, "%6p", " DEBUG");
testPattern(assert, layout, event, tokens, "%4p", "DEBUG");
testPattern(assert, layout, event, tokens, "%-4p", "DEBUG");
testPattern(assert, layout, event, tokens, "%-6p", "DEBUG ");
testPattern(assert, layout, event, tokens, "%-8p", "DEBUG ");
testPattern(assert, layout, event, tokens, "%-10p", "DEBUG ");
assert.end();
});
t.test("%[%r%] should output colored time", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%[%r%]",
"\x1B[36m14:18:30\x1B[39m"
);
assert.end();
});
t.test(
"%x{testString} should output the string stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{testString}",
"testStringToken"
);
assert.end();
}
);
t.test(
"%x{testFunction} should output the result of the function stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{testFunction}",
"testFunctionToken"
);
assert.end();
}
);
t.test(
"%x{doesNotExist} should output the string stored in tokens",
assert => {
testPattern(assert, layout, event, tokens, "%x{doesNotExist}", "null");
assert.end();
}
);
t.test(
"%x{fnThatUsesLogEvent} should be able to use the logEvent",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{fnThatUsesLogEvent}",
"DEBUG"
);
assert.end();
}
);
t.test("%x should output the string stored in tokens", assert => {
testPattern(assert, layout, event, tokens, "%x", "null");
assert.end();
});
t.test(
"%X{testString} should output the string stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{testString}",
"testStringToken"
);
assert.end();
}
);
t.test(
"%X{testFunction} should output the result of the function stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{testFunction}",
"testFunctionToken"
);
assert.end();
}
);
t.test(
"%X{doesNotExist} should output the string stored in tokens",
assert => {
testPattern(assert, layout, event, {}, "%X{doesNotExist}", "null");
assert.end();
}
);
t.test(
"%X{fnThatUsesLogEvent} should be able to use the logEvent",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{fnThatUsesLogEvent}",
"DEBUG"
);
assert.end();
}
);
t.test("%X should output the string stored in tokens", assert => {
testPattern(assert, layout, event, {}, "%X", "null");
assert.end();
});
t.end();
});
batch.test("layout makers", t => {
const layouts = require("../../lib/layouts");
t.test("should have a maker for each layout", assert => {
assert.ok(layouts.layout("messagePassThrough"));
assert.ok(layouts.layout("basic"));
assert.ok(layouts.layout("colored"));
assert.ok(layouts.layout("coloured"));
assert.ok(layouts.layout("pattern"));
assert.ok(layouts.layout("dummy"));
assert.end();
});
t.test(
"layout pattern maker should pass pattern and tokens to layout from config",
assert => {
let layout = layouts.layout("pattern", { pattern: "%%" });
assert.equal(layout({}), "%");
layout = layouts.layout("pattern", {
pattern: "%x{testStringToken}",
tokens: { testStringToken: "cheese" }
});
assert.equal(layout({}), "cheese");
assert.end();
}
);
t.end();
});
batch.test("add layout", t => {
const layouts = require("../../lib/layouts");
t.test("should be able to add a layout", assert => {
layouts.addLayout("test_layout", config => {
assert.equal(config, "test_config");
return function(logEvent) {
return `TEST LAYOUT >${logEvent.data}`;
};
});
const serializer = layouts.layout("test_layout", "test_config");
assert.ok(serializer);
assert.equal(serializer({ data: "INPUT" }), "TEST LAYOUT >INPUT");
assert.end();
});
t.end();
});
batch.end();
});
| const { test } = require("tap");
const debug = require("debug");
const os = require("os");
const path = require("path");
const { EOL } = os;
// used for patternLayout tests.
function testPattern(assert, layout, event, tokens, pattern, value) {
assert.equal(layout(pattern, tokens)(event), value);
}
test("log4js layouts", batch => {
batch.test("colouredLayout", t => {
const layout = require("../../lib/layouts").colouredLayout;
t.test("should apply level colour codes to output", assert => {
const output = layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString() {
return "ERROR";
},
colour: "red"
}
});
assert.equal(
output,
"\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense"
);
assert.end();
});
t.test("should support the console.log format for the message", assert => {
const output = layout({
data: ["thing %d", 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString() {
return "ERROR";
},
colour: "red"
}
});
assert.equal(
output,
"\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2"
);
assert.end();
});
t.end();
});
batch.test("messagePassThroughLayout", t => {
const layout = require("../../lib/layouts").messagePassThroughLayout;
t.equal(
layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"nonsense",
"should take a logevent and output only the message"
);
t.equal(
layout({
data: ["thing %d", 1, "cheese"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"thing 1 cheese",
"should support the console.log format for the message"
);
t.equal(
layout({
data: [{ thing: 1 }],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
"{ thing: 1 }",
"should output the first item even if it is not a string"
);
t.match(
layout({
data: [new Error()],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
}),
/at (Test\.batch\.test\.t|Test\.<anonymous>)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
"regexp did not return a match - should print the stacks of a passed error objects"
);
t.test("with passed augmented errors", assert => {
const e = new Error("My Unique Error Message");
e.augmented = "My Unique attribute value";
e.augObj = { at1: "at2" };
const layoutOutput = layout({
data: [e],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString() {
return "ERROR";
}
}
});
assert.match(
layoutOutput,
/Error: My Unique Error Message/,
"should print the contained error message"
);
assert.match(
layoutOutput,
/augmented:\s'My Unique attribute value'/,
"should print error augmented string attributes"
);
assert.match(
layoutOutput,
/augObj:\s\{ at1: 'at2' \}/,
"should print error augmented object attributes"
);
assert.end();
});
t.end();
});
batch.test("basicLayout", t => {
const layout = require("../../lib/layouts").basicLayout;
const event = {
data: ["this is a test"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "tests",
level: {
toString() {
return "DEBUG";
}
}
};
t.equal(
layout(event),
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test"
);
t.test(
"should output a stacktrace, message if the event has an error attached",
assert => {
let i;
const error = new Error("Some made-up error");
const stack = error.stack.split(/\n/);
event.data = ["this is a test", error];
const output = layout(event);
const lines = output.split(/\n/);
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error"
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
}
assert.end();
}
);
t.test(
"should output any extra data in the log event as util.inspect strings",
assert => {
event.data = [
"this is a test",
{
name: "Cheese",
message: "Gorgonzola smells."
}
];
const output = layout(event);
assert.equal(
output,
"[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test " +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
}
);
t.end();
});
batch.test("dummyLayout", t => {
const layout = require("../../lib/layouts").dummyLayout;
t.test("should output just the first element of the log data", assert => {
const event = {
data: ["this is the first value", "this is not"],
startTime: new Date("2010-12-05 14:18:30.045"),
categoryName: "multiple.levels.of.tests",
level: {
toString() {
return "DEBUG";
},
colour: "cyan"
}
};
assert.equal(layout(event), "this is the first value");
assert.end();
});
t.end();
});
batch.test("patternLayout", t => {
const originalListener = process.listeners("warning")[process.listeners("warning").length - 1];
const warningListener = error => {
if (error.name === "DeprecationWarning") {
if (error.code.startsWith("log4js-node-DEP0003") || error.code.startsWith("log4js-node-DEP0004")) {
return;
}
}
originalListener(error);
};
process.off("warning", originalListener);
process.on("warning", warningListener);
const debugWasEnabled = debug.enabled("log4js:layouts");
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
const originalNamespace = debug.disable();
debug.enable(`${originalNamespace}, log4js:layouts`);
batch.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off("warning", warningListener);
process.on("warning", originalListener);
});
process.stderr.write = originalWrite;
debug.enable(originalNamespace);
});
const tokens = {
testString: "testStringToken",
testFunction() {
return "testFunctionToken";
},
fnThatUsesLogEvent(logEvent) {
return logEvent.level.toString();
}
};
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
" at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)"; // eslint-disable-line max-len
const fileName = path.normalize("/log4js-node/test/tap/layouts-test.js");
const lineNumber = 1;
const columnNumber = 14;
const event = {
data: ["this is a test"],
startTime: new Date("2010-12-05 14:18:30.045"),
categoryName: "multiple.levels.of.tests",
level: {
toString() {
return "DEBUG";
},
colour: "cyan"
},
context: tokens,
// location
callStack,
fileName,
lineNumber,
columnNumber
};
event.startTime.getTimezoneOffset = () => -600;
const layout = require("../../lib/layouts").patternLayout;
t.test('should default to "time logLevel loggerName - message"', assert => {
testPattern(
assert,
layout,
event,
tokens,
null,
`14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
);
assert.end();
});
t.test("%r should output time only", assert => {
testPattern(assert, layout, event, tokens, "%r", "14:18:30");
assert.end();
});
t.test("%p should output the log level", assert => {
testPattern(assert, layout, event, tokens, "%p", "DEBUG");
assert.end();
});
t.test("%c should output the log category", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%c",
"multiple.levels.of.tests"
);
assert.end();
});
t.test("%m should output the log data", assert => {
testPattern(assert, layout, event, tokens, "%m", "this is a test");
assert.end();
});
t.test("%n should output a new line", assert => {
testPattern(assert, layout, event, tokens, "%n", EOL);
assert.end();
});
t.test("%h should output hostname", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%h",
os.hostname().toString()
);
assert.end();
});
t.test("%z should output pid", assert => {
testPattern(assert, layout, event, tokens, "%z", process.pid.toString());
assert.end();
});
t.test("%z should pick up pid from log event if present", assert => {
event.pid = "1234";
testPattern(assert, layout, event, tokens, "%z", "1234");
delete event.pid;
assert.end();
});
t.test("%y should output pid (was cluster info)", assert => {
testPattern(assert, layout, event, tokens, "%y", process.pid.toString());
assert.end();
});
t.test(
"%c should handle category names like java-style package names",
assert => {
testPattern(assert, layout, event, tokens, "%c{1}", "tests");
testPattern(assert, layout, event, tokens, "%c{2}", "of.tests");
testPattern(assert, layout, event, tokens, "%c{3}", "levels.of.tests");
testPattern(
assert,
layout,
event,
tokens,
"%c{4}",
"multiple.levels.of.tests"
);
testPattern(
assert,
layout,
event,
tokens,
"%c{5}",
"multiple.levels.of.tests"
);
testPattern(
assert,
layout,
event,
tokens,
"%c{99}",
"multiple.levels.of.tests"
);
assert.end();
}
);
t.test("%d should output the date in ISO8601 format", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%d",
"2010-12-05T14:18:30.045"
);
assert.end();
});
t.test("%d should allow for format specification", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%d{ISO8601}",
"2010-12-05T14:18:30.045"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{ISO8601_WITH_TZ_OFFSET}",
"2010-12-05T14:18:30.045+10:00"
);
const DEP0003 = debugLogs.filter((e) => e.indexOf("log4js-node-DEP0003") > -1).length;
testPattern(
assert,
layout,
event,
tokens,
"%d{ABSOLUTE}", // deprecated
"14:18:30.045"
);
assert.equal(
debugLogs.filter((e) => e.indexOf("log4js-node-DEP0003") > -1).length,
DEP0003 + 1,
"deprecation log4js-node-DEP0003 emitted"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{ABSOLUTETIME}",
"14:18:30.045"
);
const DEP0004 = debugLogs.filter((e) => e.indexOf("log4js-node-DEP0004") > -1).length;
testPattern(
assert,
layout,
event,
tokens,
"%d{DATE}", // deprecated
"05 12 2010 14:18:30.045"
);
assert.equal(
debugLogs.filter((e) => e.indexOf("log4js-node-DEP0004") > -1).length,
DEP0004 + 1,
"deprecation log4js-node-DEP0004 emitted"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{DATETIME}",
"05 12 2010 14:18:30.045"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yy MM dd hh mm ss}",
"10 12 05 14 18 30"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yyyy MM dd}",
"2010 12 05"
);
testPattern(
assert,
layout,
event,
tokens,
"%d{yyyy MM dd hh mm ss SSS}",
"2010 12 05 14 18 30 045"
);
assert.end();
});
t.test("%% should output %", assert => {
testPattern(assert, layout, event, tokens, "%%", "%");
assert.end();
});
t.test("%f should output filename", assert => {
testPattern(assert, layout, event, tokens, "%f", fileName);
assert.end();
});
t.test("%f should handle filename depth", assert => {
testPattern(assert, layout, event, tokens, "%f{1}", "layouts-test.js");
testPattern(
assert,
layout,
event,
tokens,
"%f{2}",
path.join("tap", "layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{3}",
path.join("test", "tap", "layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{4}",
path.join("log4js-node","test","tap","layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{5}",
path.join("/log4js-node","test","tap","layouts-test.js")
);
testPattern(
assert,
layout,
event,
tokens,
"%f{99}",
path.join("/log4js-node","test","tap","layouts-test.js")
);
assert.end();
});
t.test("%f should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%.5f", fileName.slice(0, 5));
testPattern(assert, layout, event, tokens, "%20f{1}", " layouts-test.js");
testPattern(assert, layout, event, tokens, "%30.30f{2}", ` ${ path.join("tap","layouts-test.js")}`);
testPattern(assert, layout, event, tokens, "%10.-5f{1}", " st.js");
assert.end();
});
t.test("%l should output line number", assert => {
testPattern(assert, layout, event, tokens, "%l", lineNumber.toString());
assert.end();
});
t.test("%l should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%5.10l", " 1");
testPattern(assert, layout, event, tokens, "%.5l", "1");
testPattern(assert, layout, event, tokens, "%.-5l", "1");
testPattern(assert, layout, event, tokens, "%-5l", "1 ");
assert.end();
});
t.test("%o should output column postion", assert => {
testPattern(assert, layout, event, tokens, "%o", columnNumber.toString());
assert.end();
});
t.test("%o should accept truncation and padding", assert => {
testPattern(assert, layout, event, tokens, "%5.10o", " 14");
testPattern(assert, layout, event, tokens, "%.5o", "14");
testPattern(assert, layout, event, tokens, "%.1o", "1");
testPattern(assert, layout, event, tokens, "%.-1o", "4");
testPattern(assert, layout, event, tokens, "%-5o", "14 ");
assert.end();
});
t.test("%s should output stack", assert => {
testPattern(assert, layout, event, tokens, "%s", callStack);
assert.end();
});
t.test("%f should output empty string when fileName not exist", assert => {
delete event.fileName;
testPattern(assert, layout, event, tokens, "%f", "");
assert.end();
});
t.test(
"%l should output empty string when lineNumber not exist",
assert => {
delete event.lineNumber;
testPattern(assert, layout, event, tokens, "%l", "");
assert.end();
}
);
t.test(
"%o should output empty string when columnNumber not exist",
assert => {
delete event.columnNumber;
testPattern(assert, layout, event, tokens, "%o", "");
assert.end();
}
);
t.test("%s should output empty string when callStack not exist", assert => {
delete event.callStack;
testPattern(assert, layout, event, tokens, "%s", "");
assert.end();
});
t.test("should output anything not preceded by % as literal", assert => {
testPattern(
assert,
layout,
event,
tokens,
"blah blah blah",
"blah blah blah"
);
assert.end();
});
t.test(
"should output the original string if no replacer matches the token",
assert => {
testPattern(assert, layout, event, tokens, "%a{3}", "a{3}");
assert.end();
}
);
t.test("should handle complicated patterns", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%m%n %c{2} at %d{ABSOLUTE} cheese %p%n", // deprecated
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
testPattern(
assert,
layout,
event,
tokens,
"%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n",
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
assert.end();
});
t.test("should truncate fields if specified", assert => {
testPattern(assert, layout, event, tokens, "%.4m", "this");
testPattern(assert, layout, event, tokens, "%.7m", "this is");
testPattern(assert, layout, event, tokens, "%.9m", "this is a");
testPattern(assert, layout, event, tokens, "%.14m", "this is a test");
testPattern(
assert,
layout,
event,
tokens,
"%.2919102m",
"this is a test"
);
testPattern(assert, layout, event, tokens, "%.-4m", "test");
assert.end();
});
t.test("should pad fields if specified", assert => {
testPattern(assert, layout, event, tokens, "%10p", " DEBUG");
testPattern(assert, layout, event, tokens, "%8p", " DEBUG");
testPattern(assert, layout, event, tokens, "%6p", " DEBUG");
testPattern(assert, layout, event, tokens, "%4p", "DEBUG");
testPattern(assert, layout, event, tokens, "%-4p", "DEBUG");
testPattern(assert, layout, event, tokens, "%-6p", "DEBUG ");
testPattern(assert, layout, event, tokens, "%-8p", "DEBUG ");
testPattern(assert, layout, event, tokens, "%-10p", "DEBUG ");
assert.end();
});
t.test("%[%r%] should output colored time", assert => {
testPattern(
assert,
layout,
event,
tokens,
"%[%r%]",
"\x1B[36m14:18:30\x1B[39m"
);
assert.end();
});
t.test(
"%x{testString} should output the string stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{testString}",
"testStringToken"
);
assert.end();
}
);
t.test(
"%x{testFunction} should output the result of the function stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{testFunction}",
"testFunctionToken"
);
assert.end();
}
);
t.test(
"%x{doesNotExist} should output the string stored in tokens",
assert => {
testPattern(assert, layout, event, tokens, "%x{doesNotExist}", "null");
assert.end();
}
);
t.test(
"%x{fnThatUsesLogEvent} should be able to use the logEvent",
assert => {
testPattern(
assert,
layout,
event,
tokens,
"%x{fnThatUsesLogEvent}",
"DEBUG"
);
assert.end();
}
);
t.test("%x should output the string stored in tokens", assert => {
testPattern(assert, layout, event, tokens, "%x", "null");
assert.end();
});
t.test(
"%X{testString} should output the string stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{testString}",
"testStringToken"
);
assert.end();
}
);
t.test(
"%X{testFunction} should output the result of the function stored in tokens",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{testFunction}",
"testFunctionToken"
);
assert.end();
}
);
t.test(
"%X{doesNotExist} should output the string stored in tokens",
assert => {
testPattern(assert, layout, event, {}, "%X{doesNotExist}", "null");
assert.end();
}
);
t.test(
"%X{fnThatUsesLogEvent} should be able to use the logEvent",
assert => {
testPattern(
assert,
layout,
event,
{},
"%X{fnThatUsesLogEvent}",
"DEBUG"
);
assert.end();
}
);
t.test("%X should output the string stored in tokens", assert => {
testPattern(assert, layout, event, {}, "%X", "null");
assert.end();
});
t.end();
});
batch.test("layout makers", t => {
const layouts = require("../../lib/layouts");
t.test("should have a maker for each layout", assert => {
assert.ok(layouts.layout("messagePassThrough"));
assert.ok(layouts.layout("basic"));
assert.ok(layouts.layout("colored"));
assert.ok(layouts.layout("coloured"));
assert.ok(layouts.layout("pattern"));
assert.ok(layouts.layout("dummy"));
assert.end();
});
t.test(
"layout pattern maker should pass pattern and tokens to layout from config",
assert => {
let layout = layouts.layout("pattern", { pattern: "%%" });
assert.equal(layout({}), "%");
layout = layouts.layout("pattern", {
pattern: "%x{testStringToken}",
tokens: { testStringToken: "cheese" }
});
assert.equal(layout({}), "cheese");
assert.end();
}
);
t.end();
});
batch.test("add layout", t => {
const layouts = require("../../lib/layouts");
t.test("should be able to add a layout", assert => {
layouts.addLayout("test_layout", config => {
assert.equal(config, "test_config");
return function(logEvent) {
return `TEST LAYOUT >${logEvent.data}`;
};
});
const serializer = layouts.layout("test_layout", "test_config");
assert.ok(serializer);
assert.equal(serializer({ data: "INPUT" }), "TEST LAYOUT >INPUT");
assert.end();
});
t.end();
});
batch.end();
});
| 1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/tap/multiprocess-test.js | const childProcess = require("child_process");
const { test } = require("tap");
const flatted = require("flatted");
const sandbox = require("@log4js-node/sandboxed-module");
const recording = require("../../lib/appenders/recording");
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end() {
fakeNet.closeCalled = true;
}
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: "1.2.3.4",
remotePort: "1234",
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
}
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
test("Multiprocess Appender", async batch => {
batch.beforeEach(() => {
recording.erase();
});
batch.test("worker", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
worker: {
type: "multiprocess",
mode: "worker",
loggerPort: 1234,
loggerHost: "pants"
}
},
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.close();
logger.info("after error, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
logger.error(new Error("Error test"));
const net = fakeNet;
t.test("should open a socket to the loggerPort and loggerHost", assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "pants");
assert.end();
});
t.test(
"should buffer messages written before socket is connected",
assert => {
assert.match(net.data[0], "before connect");
assert.end();
}
);
t.test(
"should write log messages to socket as flatted strings with a terminator string",
assert => {
assert.match(net.data[0], "before connect");
assert.equal(net.data[1], "__LOG4JS__");
assert.match(net.data[2], "after connect");
assert.equal(net.data[3], "__LOG4JS__");
assert.equal(net.encoding, "utf8");
assert.end();
}
);
t.test("should attempt to re-open the socket on error", assert => {
assert.match(net.data[4], "after error, before connect");
assert.equal(net.data[5], "__LOG4JS__");
assert.match(net.data[6], "after error, after connect");
assert.equal(net.data[7], "__LOG4JS__");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.test("should serialize an Error correctly", assert => {
assert.ok(
flatted.parse(net.data[8]).data[0].stack,
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(net.data[8]).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.test("worker with timeout", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.timeout();
logger.info("after timeout, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after timeout, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker with error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after error, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
t.test("should open a socket to localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
batch.test("master", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet,
"./appenders/recording": recording
}
});
log4js.configure({
appenders: {
recorder: { type: "recording" },
master: {
type: "multiprocess",
mode: "master",
loggerPort: 1234,
loggerHost: "server",
appender: "recorder"
}
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
const net = fakeNet;
t.test(
"should listen for log messages on loggerPort and loggerHost",
assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "server");
assert.end();
}
);
t.test("should return the underlying appender", assert => {
log4js
.getLogger()
.info("this should be sent to the actual appender directly");
assert.equal(
recording.replay()[0].data[0],
"this should be sent to the actual appender directly"
);
assert.end();
});
t.test('should log the error on "error" event', assert => {
net.cbs.error(new Error("Expected error"));
const logEvents = recording.replay();
assert.plan(2);
assert.equal(logEvents.length, 1);
assert.equal(
"A worker log process hung up unexpectedly",
logEvents[0].data[0]
);
});
t.test("when a client connects", assert => {
const logString = `${flatted.stringify({
level: { level: 10000, levelStr: "DEBUG" },
data: ["some debug"]
})}__LOG4JS__`;
net.cbs.data(
`${flatted.stringify({
level: { level: 40000, levelStr: "ERROR" },
data: ["an error message"]
})}__LOG4JS__`
);
net.cbs.data(logString.substring(0, 10));
net.cbs.data(logString.substring(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${flatted.stringify({
level: { level: 50000, levelStr: "FATAL" },
data: ["that's all folks"]
})}__LOG4JS__`
);
net.cbs.data("bad message__LOG4JS__");
const logEvents = recording.replay();
// should parse log messages into log events and send to appender
assert.equal(logEvents[0].level.toString(), "ERROR");
assert.equal(logEvents[0].data[0], "an error message");
assert.equal(logEvents[0].remoteAddress, "1.2.3.4");
assert.equal(logEvents[0].remotePort, "1234");
// should parse log messages split into multiple chunks'
assert.equal(logEvents[1].level.toString(), "DEBUG");
assert.equal(logEvents[1].data[0], "some debug");
assert.equal(logEvents[1].remoteAddress, "1.2.3.4");
assert.equal(logEvents[1].remotePort, "1234");
// should parse multiple log messages in a single chunk'
assert.equal(logEvents[2].data[0], "some debug");
assert.equal(logEvents[3].data[0], "some debug");
assert.equal(logEvents[4].data[0], "some debug");
// should handle log messages sent as part of end event'
assert.equal(logEvents[5].data[0], "that's all folks");
// should handle unparseable log messages
assert.equal(logEvents[6].level.toString(), "ERROR");
assert.equal(logEvents[6].categoryName, "log4js");
assert.equal(logEvents[6].data[0], "Unable to parse log:");
assert.equal(logEvents[6].data[1], "bad message");
assert.end();
});
t.end();
});
batch.test("master without actual appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: { master: { type: "multiprocess", mode: "master" } },
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
});
batch.test("master with unknown appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: {
master: { type: "multiprocess", mode: "master", appender: "cheese" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
});
batch.test("master defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
stdout: { type: "stdout" },
master: { type: "multiprocess", mode: "master", appender: "stdout" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
t.test("should listen for log messages on localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
await batch.test('e2e test', async (assert) => {
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recording: { type: 'recording' },
master: { type: 'multiprocess', mode: 'master', appender: 'recording', loggerPort: 5001 },
},
categories: { default: { appenders: ['recording'], level: 'trace' } },
});
const child = childProcess.fork(
require.resolve('../multiprocess-worker.js'),
['start-multiprocess-worker', '5001'],
{ stdio: 'inherit' }
);
const actualMsg = await new Promise((res, rej) => {
child.on('message', res);
child.on('error', rej);
});
const logEvents = recording.replay();
assert.equal(actualMsg, 'worker is done');
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'Logging from worker');
assert.end();
});
batch.end();
});
| const childProcess = require("child_process");
const { test } = require("tap");
const flatted = require("flatted");
const sandbox = require("@log4js-node/sandboxed-module");
const recording = require("../../lib/appenders/recording");
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end() {
fakeNet.closeCalled = true;
}
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: "1.2.3.4",
remotePort: "1234",
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
}
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
test("Multiprocess Appender", async batch => {
batch.beforeEach(() => {
recording.erase();
});
batch.test("worker", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
worker: {
type: "multiprocess",
mode: "worker",
loggerPort: 1234,
loggerHost: "pants"
}
},
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.close();
logger.info("after error, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
logger.error(new Error("Error test"));
const net = fakeNet;
t.test("should open a socket to the loggerPort and loggerHost", assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "pants");
assert.end();
});
t.test(
"should buffer messages written before socket is connected",
assert => {
assert.match(net.data[0], "before connect");
assert.end();
}
);
t.test(
"should write log messages to socket as flatted strings with a terminator string",
assert => {
assert.match(net.data[0], "before connect");
assert.equal(net.data[1], "__LOG4JS__");
assert.match(net.data[2], "after connect");
assert.equal(net.data[3], "__LOG4JS__");
assert.equal(net.encoding, "utf8");
assert.end();
}
);
t.test("should attempt to re-open the socket on error", assert => {
assert.match(net.data[4], "after error, before connect");
assert.equal(net.data[5], "__LOG4JS__");
assert.match(net.data[6], "after error, after connect");
assert.equal(net.data[7], "__LOG4JS__");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.test("should serialize an Error correctly", assert => {
assert.ok(
flatted.parse(net.data[8]).data[0].stack,
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(net.data[8]).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.test("worker with timeout", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.timeout();
logger.info("after timeout, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after timeout, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker with error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
const net = fakeNet;
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], "before connect");
assert.match(net.data[2], "after connect");
assert.match(net.data[4], "after error, before close");
assert.match(net.data[6], "after close, before connect");
assert.match(net.data[8], "after close, after connect");
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
t.test("should open a socket to localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
batch.test("master", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet,
"./appenders/recording": recording
}
});
log4js.configure({
appenders: {
recorder: { type: "recording" },
master: {
type: "multiprocess",
mode: "master",
loggerPort: 1234,
loggerHost: "server",
appender: "recorder"
}
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
const net = fakeNet;
t.test(
"should listen for log messages on loggerPort and loggerHost",
assert => {
assert.equal(net.port, 1234);
assert.equal(net.host, "server");
assert.end();
}
);
t.test("should return the underlying appender", assert => {
log4js
.getLogger()
.info("this should be sent to the actual appender directly");
assert.equal(
recording.replay()[0].data[0],
"this should be sent to the actual appender directly"
);
assert.end();
});
t.test('should log the error on "error" event', assert => {
net.cbs.error(new Error("Expected error"));
const logEvents = recording.replay();
assert.plan(2);
assert.equal(logEvents.length, 1);
assert.equal(
"A worker log process hung up unexpectedly",
logEvents[0].data[0]
);
});
t.test("when a client connects", assert => {
const logString = `${flatted.stringify({
level: { level: 10000, levelStr: "DEBUG" },
data: ["some debug"]
})}__LOG4JS__`;
net.cbs.data(
`${flatted.stringify({
level: { level: 40000, levelStr: "ERROR" },
data: ["an error message"]
})}__LOG4JS__`
);
net.cbs.data(logString.slice(0, 10));
net.cbs.data(logString.slice(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${flatted.stringify({
level: { level: 50000, levelStr: "FATAL" },
data: ["that's all folks"]
})}__LOG4JS__`
);
net.cbs.data("bad message__LOG4JS__");
const logEvents = recording.replay();
// should parse log messages into log events and send to appender
assert.equal(logEvents[0].level.toString(), "ERROR");
assert.equal(logEvents[0].data[0], "an error message");
assert.equal(logEvents[0].remoteAddress, "1.2.3.4");
assert.equal(logEvents[0].remotePort, "1234");
// should parse log messages split into multiple chunks'
assert.equal(logEvents[1].level.toString(), "DEBUG");
assert.equal(logEvents[1].data[0], "some debug");
assert.equal(logEvents[1].remoteAddress, "1.2.3.4");
assert.equal(logEvents[1].remotePort, "1234");
// should parse multiple log messages in a single chunk'
assert.equal(logEvents[2].data[0], "some debug");
assert.equal(logEvents[3].data[0], "some debug");
assert.equal(logEvents[4].data[0], "some debug");
// should handle log messages sent as part of end event'
assert.equal(logEvents[5].data[0], "that's all folks");
// should handle unparseable log messages
assert.equal(logEvents[6].level.toString(), "ERROR");
assert.equal(logEvents[6].categoryName, "log4js");
assert.equal(logEvents[6].data[0], "Unable to parse log:");
assert.equal(logEvents[6].data[1], "bad message");
assert.end();
});
t.end();
});
batch.test("master without actual appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: { master: { type: "multiprocess", mode: "master" } },
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
});
batch.test("master with unknown appender throws error", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
t.throws(
() =>
log4js.configure({
appenders: {
master: { type: "multiprocess", mode: "master", appender: "cheese" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
});
batch.test("master defaults", t => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: {
stdout: { type: "stdout" },
master: { type: "multiprocess", mode: "master", appender: "stdout" }
},
categories: { default: { appenders: ["master"], level: "trace" } }
});
t.test("should listen for log messages on localhost:5000", assert => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, "localhost");
assert.end();
});
t.end();
});
await batch.test('e2e test', async (assert) => {
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recording: { type: 'recording' },
master: { type: 'multiprocess', mode: 'master', appender: 'recording', loggerPort: 5001 },
},
categories: { default: { appenders: ['recording'], level: 'trace' } },
});
const child = childProcess.fork(
require.resolve('../multiprocess-worker.js'),
['start-multiprocess-worker', '5001'],
{ stdio: 'inherit' }
);
const actualMsg = await new Promise((res, rej) => {
child.on('message', res);
child.on('error', rej);
});
const logEvents = recording.replay();
assert.equal(actualMsg, 'worker is done');
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'Logging from worker');
assert.end();
});
batch.end();
});
| 1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/tap/tcp-appender-test.js | const { test } = require("tap");
const net = require("net");
const flatted = require("flatted");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
const LoggingEvent = require("../../lib/LoggingEvent");
let messages = [];
let server = null;
function makeServer(config) {
server = net.createServer(socket => {
socket.setEncoding("utf8");
socket.on("data", data => {
data
.split(config.endMsg)
.filter(s => s.length)
.forEach(s => {
messages.push(config.deserialise(s));
});
});
});
server.unref();
return server;
}
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
return false;
},
end() {
fakeNet.closeCalled = true;
}
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: "1.2.3.4",
remotePort: "1234",
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
}
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
test("TCP Appender", batch => {
batch.test("Default Configuration", t => {
messages = [];
const serverConfig = {
endMsg: "__LOG4JS__",
deserialise: (log) => LoggingEvent.deserialise(log)
}
server = makeServer(serverConfig);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
default: { type: "tcp", port },
},
categories: {
default: { appenders: ["default"], level: "debug" },
}
});
const logger = log4js.getLogger();
logger.info("This should be sent via TCP.");
logger.info("This should also be sent via TCP and not break things.");
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ["This should be sent via TCP."],
categoryName: "default",
context: {},
level: { levelStr: "INFO" }
});
t.match(messages[1], {
data: ["This should also be sent via TCP and not break things."],
categoryName: "default",
context: {},
level: { levelStr: "INFO" }
});
t.end();
});
});
});
});
batch.test("Custom EndMessage String", t => {
messages = [];
const serverConfig = {
endMsg: "\n",
deserialise: (log) => LoggingEvent.deserialise(log)
}
server = makeServer(serverConfig);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
customEndMsg: { type: "tcp", port, endMsg: "\n" },
},
categories: {
default: { appenders: ["customEndMsg"], level: "debug" },
}
});
const logger = log4js.getLogger();
logger.info("This should be sent via TCP using a custom EndMsg string.");
logger.info("This should also be sent via TCP using a custom EndMsg string and not break things.");
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ["This should be sent via TCP using a custom EndMsg string."],
categoryName: "default",
context: {},
level: { levelStr: "INFO" }
});
t.match(messages[1], {
data: ["This should also be sent via TCP using a custom EndMsg string and not break things."],
categoryName: "default",
context: {},
level: { levelStr: "INFO" }
});
t.end();
});
});
});
});
batch.test("Custom Layout", t => {
messages = [];
const serverConfig = {
endMsg: "__LOG4JS__",
deserialise: (log) => JSON.parse(log)
}
server = makeServer(serverConfig);
log4js.addLayout('json', () => function (logEvent) {
return JSON.stringify({
"time": logEvent.startTime,
"message": logEvent.data[0],
"level": logEvent.level.toString()
});
});
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
customLayout: {
type: "tcp", port,
layout: { type: 'json' }
},
},
categories: {
default: { appenders: ["customLayout"], level: "debug" },
}
});
const logger = log4js.getLogger();
logger.info("This should be sent as a customized json.");
logger.info("This should also be sent via TCP as a customized json and not break things.");
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
message: "This should be sent as a customized json.",
level: "INFO"
});
t.match(messages[1], {
message: "This should also be sent via TCP as a customized json and not break things.",
level: "INFO"
});
t.end();
});
});
});
});
batch.test("when underlying stream errors", t => {
const fakeNet = makeFakeNet();
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
sandboxedLog4js.configure({
appenders: {
default: { type: "tcp" },
},
categories: {
default: { appenders: ["default"], level: "debug" },
}
});
const logger = sandboxedLog4js.getLogger();
logger.info("before connect");
t.test(
"should buffer messages written before socket is connected",
assert => {
assert.equal(fakeNet.data.length, 0);
assert.equal(fakeNet.createConnectionCalled, 1);
assert.end();
}
);
fakeNet.cbs.connect();
t.test(
"should flush buffered messages",
assert => {
assert.equal(fakeNet.data.length, 1);
assert.equal(fakeNet.createConnectionCalled, 1);
assert.match(fakeNet.data[0], "before connect");
assert.end();
}
);
logger.info("after connect");
t.test(
"should write log messages to socket as flatted strings with a terminator string",
assert => {
assert.equal(fakeNet.data.length, 2);
assert.match(fakeNet.data[0], "before connect");
assert.ok(fakeNet.data[0].endsWith("__LOG4JS__"));
assert.match(fakeNet.data[1], "after connect");
assert.ok(fakeNet.data[1].endsWith("__LOG4JS__"));
assert.equal(fakeNet.encoding, "utf8");
assert.end();
}
);
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
t.test("should attempt to re-open the socket on error", assert => {
assert.equal(fakeNet.data.length, 5);
assert.equal(fakeNet.createConnectionCalled, 2);
assert.match(fakeNet.data[2], "after error, before close");
assert.match(fakeNet.data[3], "after close, before connect");
assert.match(fakeNet.data[4], "after error, after connect");
assert.end();
});
t.test("should buffer messages until drain", assert => {
const previousLength = fakeNet.data.length;
logger.info("should not be flushed");
assert.equal(fakeNet.data.length, previousLength);
assert.notMatch(fakeNet.data[fakeNet.data.length - 1], "should not be flushed");
fakeNet.cbs.drain();
assert.equal(fakeNet.data.length, previousLength + 1);
assert.match(fakeNet.data[fakeNet.data.length - 1], "should not be flushed");
assert.end();
});
t.test("should serialize an Error correctly", assert => {
const previousLength = fakeNet.data.length;
logger.error(new Error("Error test"));
fakeNet.cbs.drain();
assert.equal(fakeNet.data.length, previousLength + 1);
const raw = fakeNet.data[fakeNet.data.length - 1];
assert.ok(
flatted.parse(raw.substring(0, raw.indexOf('__LOG4JS__'))).data[0].stack,
`Expected:\n\n${fakeNet.data[6]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(raw.substring(0, raw.indexOf('__LOG4JS__'))).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.end();
});
| const { test } = require("tap");
const net = require("net");
const flatted = require("flatted");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
const LoggingEvent = require("../../lib/LoggingEvent");
let messages = [];
let server = null;
function makeServer(config) {
server = net.createServer(socket => {
socket.setEncoding("utf8");
socket.on("data", data => {
data
.split(config.endMsg)
.filter(s => s.length)
.forEach(s => {
messages.push(config.deserialise(s));
});
});
});
server.unref();
return server;
}
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
return false;
},
end() {
fakeNet.closeCalled = true;
}
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: "1.2.3.4",
remotePort: "1234",
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
}
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
test("TCP Appender", batch => {
batch.test("Default Configuration", t => {
messages = [];
const serverConfig = {
endMsg: "__LOG4JS__",
deserialise: (log) => LoggingEvent.deserialise(log)
}
server = makeServer(serverConfig);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
default: { type: "tcp", port },
},
categories: {
default: { appenders: ["default"], level: "debug" },
}
});
const logger = log4js.getLogger();
logger.info("This should be sent via TCP.");
logger.info("This should also be sent via TCP and not break things.");
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ["This should be sent via TCP."],
categoryName: "default",
context: {},
level: { levelStr: "INFO" }
});
t.match(messages[1], {
data: ["This should also be sent via TCP and not break things."],
categoryName: "default",
context: {},
level: { levelStr: "INFO" }
});
t.end();
});
});
});
});
batch.test("Custom EndMessage String", t => {
messages = [];
const serverConfig = {
endMsg: "\n",
deserialise: (log) => LoggingEvent.deserialise(log)
}
server = makeServer(serverConfig);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
customEndMsg: { type: "tcp", port, endMsg: "\n" },
},
categories: {
default: { appenders: ["customEndMsg"], level: "debug" },
}
});
const logger = log4js.getLogger();
logger.info("This should be sent via TCP using a custom EndMsg string.");
logger.info("This should also be sent via TCP using a custom EndMsg string and not break things.");
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ["This should be sent via TCP using a custom EndMsg string."],
categoryName: "default",
context: {},
level: { levelStr: "INFO" }
});
t.match(messages[1], {
data: ["This should also be sent via TCP using a custom EndMsg string and not break things."],
categoryName: "default",
context: {},
level: { levelStr: "INFO" }
});
t.end();
});
});
});
});
batch.test("Custom Layout", t => {
messages = [];
const serverConfig = {
endMsg: "__LOG4JS__",
deserialise: (log) => JSON.parse(log)
}
server = makeServer(serverConfig);
log4js.addLayout('json', () => function (logEvent) {
return JSON.stringify({
"time": logEvent.startTime,
"message": logEvent.data[0],
"level": logEvent.level.toString()
});
});
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
customLayout: {
type: "tcp", port,
layout: { type: 'json' }
},
},
categories: {
default: { appenders: ["customLayout"], level: "debug" },
}
});
const logger = log4js.getLogger();
logger.info("This should be sent as a customized json.");
logger.info("This should also be sent via TCP as a customized json and not break things.");
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
message: "This should be sent as a customized json.",
level: "INFO"
});
t.match(messages[1], {
message: "This should also be sent via TCP as a customized json and not break things.",
level: "INFO"
});
t.end();
});
});
});
});
batch.test("when underlying stream errors", t => {
const fakeNet = makeFakeNet();
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
sandboxedLog4js.configure({
appenders: {
default: { type: "tcp" },
},
categories: {
default: { appenders: ["default"], level: "debug" },
}
});
const logger = sandboxedLog4js.getLogger();
logger.info("before connect");
t.test(
"should buffer messages written before socket is connected",
assert => {
assert.equal(fakeNet.data.length, 0);
assert.equal(fakeNet.createConnectionCalled, 1);
assert.end();
}
);
fakeNet.cbs.connect();
t.test(
"should flush buffered messages",
assert => {
assert.equal(fakeNet.data.length, 1);
assert.equal(fakeNet.createConnectionCalled, 1);
assert.match(fakeNet.data[0], "before connect");
assert.end();
}
);
logger.info("after connect");
t.test(
"should write log messages to socket as flatted strings with a terminator string",
assert => {
assert.equal(fakeNet.data.length, 2);
assert.match(fakeNet.data[0], "before connect");
assert.ok(fakeNet.data[0].endsWith("__LOG4JS__"));
assert.match(fakeNet.data[1], "after connect");
assert.ok(fakeNet.data[1].endsWith("__LOG4JS__"));
assert.equal(fakeNet.encoding, "utf8");
assert.end();
}
);
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
t.test("should attempt to re-open the socket on error", assert => {
assert.equal(fakeNet.data.length, 5);
assert.equal(fakeNet.createConnectionCalled, 2);
assert.match(fakeNet.data[2], "after error, before close");
assert.match(fakeNet.data[3], "after close, before connect");
assert.match(fakeNet.data[4], "after error, after connect");
assert.end();
});
t.test("should buffer messages until drain", assert => {
const previousLength = fakeNet.data.length;
logger.info("should not be flushed");
assert.equal(fakeNet.data.length, previousLength);
assert.notMatch(fakeNet.data[fakeNet.data.length - 1], "should not be flushed");
fakeNet.cbs.drain();
assert.equal(fakeNet.data.length, previousLength + 1);
assert.match(fakeNet.data[fakeNet.data.length - 1], "should not be flushed");
assert.end();
});
t.test("should serialize an Error correctly", assert => {
const previousLength = fakeNet.data.length;
logger.error(new Error("Error test"));
fakeNet.cbs.drain();
assert.equal(fakeNet.data.length, previousLength + 1);
const raw = fakeNet.data[fakeNet.data.length - 1];
const offset = raw.indexOf('__LOG4JS__');
assert.ok(
flatted.parse(raw.slice(0, offset !== -1 ? offset : 0)).data[0].stack,
`Expected:\n\n${fakeNet.data[6]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(raw.slice(0, offset !== -1 ? offset : 0)).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.end();
});
| 1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/tap/file-sighup-test.js | const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const sandbox = require("@log4js-node/sandboxed-module");
const removeFiles = async filenames => {
if (!Array.isArray(filenames))
filenames = [filenames];
const promises = filenames.map(filename => fs.promises.unlink(filename));
await Promise.allSettled(promises);
};
test("file appender single SIGHUP handler", t => {
const initialListeners = process.listenerCount("SIGHUP");
let warning;
const originalListener = process.listeners("warning")[process.listeners("warning").length - 1];
const warningListener = error => {
if (error.type === "SIGHUP" && error.name === "MaxListenersExceededWarning") {
warning = error;
return;
}
originalListener(error);
};
process.off("warning", originalListener);
process.on("warning", warningListener);
const config = {
appenders: {},
categories: {
default: { appenders: [], level: 'debug' }
}
};
// create 11 appenders to make nodejs warn for >10 max listeners
const numOfAppenders = 11;
for (let i = 1; i <= numOfAppenders; i++) {
config.appenders[`app${i}`] = { type: 'file', filename: path.join(__dirname, `file${i}.log`) };
config.categories.default.appenders.push(`app${i}`);
}
const log4js = require("../../lib/log4js");
log4js.configure(config);
t.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off("warning", warningListener);
process.on("warning", originalListener);
});
await new Promise(resolve => { log4js.shutdown(resolve); });
const filenames = Object.values(config.appenders).map(appender => appender.filename);
await removeFiles(filenames);
});
t.plan(2);
// next event loop to allow event emitter/listener to happen
setImmediate(() => {
t.notOk(warning, "should not have MaxListenersExceededWarning for SIGHUP");
t.equal(process.listenerCount("SIGHUP") - initialListeners, 1, "should be 1 SIGHUP listener");
t.end();
});
});
test("file appender SIGHUP", t => {
let closeCalled = 0;
let openCalled = 0;
sandbox
.require("../../lib/appenders/file", {
requires: {
streamroller: {
RollingFileStream: class RollingFileStream {
constructor() {
openCalled++;
this.ended = false;
}
on() {
this.dummy = "easier than turning off lint rule";
}
end(cb) {
this.ended = true;
closeCalled++;
cb();
}
write() {
if (this.ended) {
throw new Error("write after end");
}
return true;
}
}
}
}
})
.configure(
{ type: "file", filename: "sighup-test-file" },
{
basicLayout() {
return "whatever";
}
}
);
process.emit("SIGHUP", "SIGHUP", 1);
t.plan(2);
setTimeout(() => {
t.equal(openCalled, 2, "open should be called twice");
t.equal(closeCalled, 1, "close should be called once");
t.end();
}, 100);
});
test("file appender SIGHUP handler leak", t => {
const log4js = require("../../lib/log4js");
const initialListeners = process.listenerCount("SIGHUP");
log4js.configure({
appenders: {
file: { type: "file", filename: "test.log" }
},
categories: { default: { appenders: ["file"], level: "info" } }
});
t.teardown(async () => {
await removeFiles("test.log");
});
t.plan(2);
t.equal(process.listenerCount("SIGHUP"), initialListeners + 1);
log4js.shutdown(() => {
t.equal(process.listenerCount("SIGHUP"), initialListeners);
t.end();
});
}); | const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const sandbox = require("@log4js-node/sandboxed-module");
const removeFiles = async filenames => {
if (!Array.isArray(filenames))
filenames = [filenames];
const promises = filenames.map(filename => fs.promises.unlink(filename));
await Promise.allSettled(promises);
};
test("file appender single SIGHUP handler", t => {
const initialListeners = process.listenerCount("SIGHUP");
let warning;
const originalListener = process.listeners("warning")[process.listeners("warning").length - 1];
const warningListener = error => {
if (error.type === "SIGHUP" && error.name === "MaxListenersExceededWarning") {
warning = error;
return;
}
originalListener(error);
};
process.off("warning", originalListener);
process.on("warning", warningListener);
const config = {
appenders: {},
categories: {
default: { appenders: [], level: 'debug' }
}
};
// create 11 appenders to make nodejs warn for >10 max listeners
const numOfAppenders = 11;
for (let i = 1; i <= numOfAppenders; i++) {
config.appenders[`app${i}`] = { type: 'file', filename: path.join(__dirname, `file${i}.log`) };
config.categories.default.appenders.push(`app${i}`);
}
const log4js = require("../../lib/log4js");
log4js.configure(config);
t.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off("warning", warningListener);
process.on("warning", originalListener);
});
await new Promise(resolve => { log4js.shutdown(resolve); });
const filenames = Object.values(config.appenders).map(appender => appender.filename);
await removeFiles(filenames);
});
t.plan(2);
// next event loop to allow event emitter/listener to happen
setImmediate(() => {
t.notOk(warning, "should not have MaxListenersExceededWarning for SIGHUP");
t.equal(process.listenerCount("SIGHUP") - initialListeners, 1, "should be 1 SIGHUP listener");
t.end();
});
});
test("file appender SIGHUP", t => {
let closeCalled = 0;
let openCalled = 0;
sandbox
.require("../../lib/appenders/file", {
requires: {
streamroller: {
RollingFileStream: class RollingFileStream {
constructor() {
openCalled++;
this.ended = false;
}
on() {
this.dummy = "easier than turning off lint rule";
}
end(cb) {
this.ended = true;
closeCalled++;
cb();
}
write() {
if (this.ended) {
throw new Error("write after end");
}
return true;
}
}
}
}
})
.configure(
{ type: "file", filename: "sighup-test-file" },
{
basicLayout() {
return "whatever";
}
}
);
process.emit("SIGHUP", "SIGHUP", 1);
t.plan(2);
setTimeout(() => {
t.equal(openCalled, 2, "open should be called twice");
t.equal(closeCalled, 1, "close should be called once");
t.end();
}, 100);
});
test("file appender SIGHUP handler leak", t => {
const log4js = require("../../lib/log4js");
const initialListeners = process.listenerCount("SIGHUP");
log4js.configure({
appenders: {
file: { type: "file", filename: "test.log" }
},
categories: { default: { appenders: ["file"], level: "info" } }
});
t.teardown(async () => {
await removeFiles("test.log");
});
t.plan(2);
t.equal(process.listenerCount("SIGHUP"), initialListeners + 1);
log4js.shutdown(() => {
t.equal(process.listenerCount("SIGHUP"), initialListeners);
t.end();
});
}); | -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/tap/no-cluster-test.js | const { test } = require("tap");
const proxyquire = require("proxyquire");
test("clustering is disabled if cluster is not present", t => {
const log4js = proxyquire("../../lib/log4js", { cluster: null });
const recorder = require("../../lib/appenders/recording");
log4js.configure({
appenders: { vcr: { type: "recording" } },
categories: { default: { appenders: ["vcr"], level: "debug" } }
});
log4js.getLogger().info("it should still work");
const events = recorder.replay();
t.equal(events[0].data[0], "it should still work");
t.end();
});
| const { test } = require("tap");
const proxyquire = require("proxyquire");
test("clustering is disabled if cluster is not present", t => {
const log4js = proxyquire("../../lib/log4js", { cluster: null });
const recorder = require("../../lib/appenders/recording");
log4js.configure({
appenders: { vcr: { type: "recording" } },
categories: { default: { appenders: ["vcr"], level: "debug" } }
});
log4js.getLogger().info("it should still work");
const events = recorder.replay();
t.equal(events[0].data[0], "it should still work");
t.end();
});
| -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./examples/memory-test.js | const log4js = require('../lib/log4js');
log4js.configure(
{
appenders: {
logs: {
type: 'file',
filename: 'memory-test.log'
},
console: {
type: 'stdout',
},
file: {
type: 'file',
filename: 'memory-usage.log',
layout: {
type: 'messagePassThrough'
}
}
},
categories: {
default: { appenders: ['console'], level: 'info' },
'memory-test': { appenders: ['logs'], level: 'info' },
'memory-usage': { appenders: ['console', 'file'], level: 'info' }
}
}
);
const logger = log4js.getLogger('memory-test');
const usage = log4js.getLogger('memory-usage');
for (let i = 0; i < 1000000; i += 1) {
if ((i % 5000) === 0) {
usage.info('%d %d', i, process.memoryUsage().rss);
}
logger.info('Doing something.');
}
log4js.shutdown(() => {});
| const log4js = require('../lib/log4js');
log4js.configure(
{
appenders: {
logs: {
type: 'file',
filename: 'memory-test.log'
},
console: {
type: 'stdout',
},
file: {
type: 'file',
filename: 'memory-usage.log',
layout: {
type: 'messagePassThrough'
}
}
},
categories: {
default: { appenders: ['console'], level: 'info' },
'memory-test': { appenders: ['logs'], level: 'info' },
'memory-usage': { appenders: ['console', 'file'], level: 'info' }
}
}
);
const logger = log4js.getLogger('memory-test');
const usage = log4js.getLogger('memory-usage');
for (let i = 0; i < 1000000; i += 1) {
if ((i % 5000) === 0) {
usage.info('%d %d', i, process.memoryUsage().rss);
}
logger.info('Doing something.');
}
log4js.shutdown(() => {});
| -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./lib/logger.js | /* eslint no-underscore-dangle: ["error", { "allow": ["_log"] }] */
const debug = require("debug")("log4js:logger");
const LoggingEvent = require("./LoggingEvent");
const levels = require("./levels");
const clustering = require("./clustering");
const categories = require("./categories");
const configuration = require("./configuration");
const stackReg = /at (?:(.+)\s+\()?(?:(.+?):(\d+)(?::(\d+))?|([^)]+))\)?/;
function defaultParseCallStack(data, skipIdx = 4) {
try {
const stacklines = data.stack.split("\n").slice(skipIdx);
const lineMatch = stackReg.exec(stacklines[0]);
/* istanbul ignore else: failsafe */
if (lineMatch && lineMatch.length === 6) {
return {
functionName: lineMatch[1],
fileName: lineMatch[2],
lineNumber: parseInt(lineMatch[3], 10),
columnNumber: parseInt(lineMatch[4], 10),
callStack: stacklines.join("\n")
};
} else { // eslint-disable-line no-else-return
// will never get here unless nodejs has changes to Error
console.error('log4js.logger - defaultParseCallStack error'); // eslint-disable-line no-console
}
}
catch (err) {
// will never get error unless nodejs has breaking changes to Error
console.error('log4js.logger - defaultParseCallStack error', err); // eslint-disable-line no-console
}
return null;
}
/**
* Logger to log messages.
* use {@see log4js#getLogger(String)} to get an instance.
*
* @name Logger
* @namespace Log4js
* @param name name of category to log to
* @param level - the loglevel for the category
* @param dispatch - the function which will receive the logevents
*
* @author Stephan Strittmatter
*/
class Logger {
constructor(name) {
if (!name) {
throw new Error("No category provided.");
}
this.category = name;
this.context = {};
this.parseCallStack = defaultParseCallStack;
debug(`Logger created (${this.category}, ${this.level})`);
}
get level() {
return levels.getLevel(
categories.getLevelForCategory(this.category),
levels.OFF
);
}
set level(level) {
categories.setLevelForCategory(
this.category,
levels.getLevel(level, this.level)
);
}
get useCallStack() {
return categories.getEnableCallStackForCategory(this.category);
}
set useCallStack(bool) {
categories.setEnableCallStackForCategory(this.category, bool === true);
}
log(level, ...args) {
let logLevel = levels.getLevel(level);
if (!logLevel) {
this._log(levels.WARN, 'log4js:logger.log: invalid value for log-level as first parameter given: ', level);
logLevel = levels.INFO;
}
if (this.isLevelEnabled(logLevel)) {
this._log(logLevel, args);
}
}
isLevelEnabled(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
}
_log(level, data) {
debug(`sending log data (${level}) to appenders`);
const loggingEvent = new LoggingEvent(
this.category,
level,
data,
this.context,
this.useCallStack && this.parseCallStack(new Error())
);
clustering.send(loggingEvent);
}
addContext(key, value) {
this.context[key] = value;
}
removeContext(key) {
delete this.context[key];
}
clearContext() {
this.context = {};
}
setParseCallStackFunction(parseFunction) {
this.parseCallStack = parseFunction;
}
}
function addLevelMethods(target) {
const level = levels.getLevel(target);
const levelStrLower = level.toString().toLowerCase();
const levelMethod = levelStrLower.replace(/_([a-z])/g, g =>
g[1].toUpperCase()
);
const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
Logger.prototype[`is${isLevelMethod}Enabled`] = function () {
return this.isLevelEnabled(level);
};
Logger.prototype[levelMethod] = function (...args) {
this.log(level, ...args);
};
}
levels.levels.forEach(addLevelMethods);
configuration.addListener(() => {
levels.levels.forEach(addLevelMethods);
});
module.exports = Logger;
| /* eslint no-underscore-dangle: ["error", { "allow": ["_log"] }] */
const debug = require("debug")("log4js:logger");
const LoggingEvent = require("./LoggingEvent");
const levels = require("./levels");
const clustering = require("./clustering");
const categories = require("./categories");
const configuration = require("./configuration");
const stackReg = /at (?:(.+)\s+\()?(?:(.+?):(\d+)(?::(\d+))?|([^)]+))\)?/;
function defaultParseCallStack(data, skipIdx = 4) {
try {
const stacklines = data.stack.split("\n").slice(skipIdx);
const lineMatch = stackReg.exec(stacklines[0]);
/* istanbul ignore else: failsafe */
if (lineMatch && lineMatch.length === 6) {
return {
functionName: lineMatch[1],
fileName: lineMatch[2],
lineNumber: parseInt(lineMatch[3], 10),
columnNumber: parseInt(lineMatch[4], 10),
callStack: stacklines.join("\n")
};
} else { // eslint-disable-line no-else-return
// will never get here unless nodejs has changes to Error
console.error('log4js.logger - defaultParseCallStack error'); // eslint-disable-line no-console
}
}
catch (err) {
// will never get error unless nodejs has breaking changes to Error
console.error('log4js.logger - defaultParseCallStack error', err); // eslint-disable-line no-console
}
return null;
}
/**
* Logger to log messages.
* use {@see log4js#getLogger(String)} to get an instance.
*
* @name Logger
* @namespace Log4js
* @param name name of category to log to
* @param level - the loglevel for the category
* @param dispatch - the function which will receive the logevents
*
* @author Stephan Strittmatter
*/
class Logger {
constructor(name) {
if (!name) {
throw new Error("No category provided.");
}
this.category = name;
this.context = {};
this.parseCallStack = defaultParseCallStack;
debug(`Logger created (${this.category}, ${this.level})`);
}
get level() {
return levels.getLevel(
categories.getLevelForCategory(this.category),
levels.OFF
);
}
set level(level) {
categories.setLevelForCategory(
this.category,
levels.getLevel(level, this.level)
);
}
get useCallStack() {
return categories.getEnableCallStackForCategory(this.category);
}
set useCallStack(bool) {
categories.setEnableCallStackForCategory(this.category, bool === true);
}
log(level, ...args) {
let logLevel = levels.getLevel(level);
if (!logLevel) {
this._log(levels.WARN, 'log4js:logger.log: invalid value for log-level as first parameter given: ', level);
logLevel = levels.INFO;
}
if (this.isLevelEnabled(logLevel)) {
this._log(logLevel, args);
}
}
isLevelEnabled(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
}
_log(level, data) {
debug(`sending log data (${level}) to appenders`);
const loggingEvent = new LoggingEvent(
this.category,
level,
data,
this.context,
this.useCallStack && this.parseCallStack(new Error())
);
clustering.send(loggingEvent);
}
addContext(key, value) {
this.context[key] = value;
}
removeContext(key) {
delete this.context[key];
}
clearContext() {
this.context = {};
}
setParseCallStackFunction(parseFunction) {
this.parseCallStack = parseFunction;
}
}
function addLevelMethods(target) {
const level = levels.getLevel(target);
const levelStrLower = level.toString().toLowerCase();
const levelMethod = levelStrLower.replace(/_([a-z])/g, g =>
g[1].toUpperCase()
);
const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
Logger.prototype[`is${isLevelMethod}Enabled`] = function () {
return this.isLevelEnabled(level);
};
Logger.prototype[levelMethod] = function (...args) {
this.log(level, ...args);
};
}
levels.levels.forEach(addLevelMethods);
configuration.addListener(() => {
levels.levels.forEach(addLevelMethods);
});
module.exports = Logger;
| -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/multiprocess-worker.js | if (process.argv.indexOf('start-multiprocess-worker') >= 0) {
const log4js = require('../lib/log4js');
const port = parseInt(process.argv[process.argv.length - 1], 10);
log4js.configure({
appenders: {
multi: { type: 'multiprocess', mode: 'worker', loggerPort: port },
},
categories: { default: { appenders: ['multi'], level: 'debug' } }
});
log4js.getLogger('worker').info('Logging from worker');
log4js.shutdown(() => {
process.send('worker is done');
});
}
| if (process.argv.indexOf('start-multiprocess-worker') >= 0) {
const log4js = require('../lib/log4js');
const port = parseInt(process.argv[process.argv.length - 1], 10);
log4js.configure({
appenders: {
multi: { type: 'multiprocess', mode: 'worker', loggerPort: port },
},
categories: { default: { appenders: ['multi'], level: 'debug' } }
});
log4js.getLogger('worker').info('Logging from worker');
log4js.shutdown(() => {
process.send('worker is done');
});
}
| -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/tap/dateFileAppender-test.js | /* eslint max-classes-per-file: ["error", 3] */
const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const EOL = require("os").EOL || "\n";
const format = require("date-format");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
function removeFile(filename) {
try {
fs.unlinkSync(path.join(__dirname, filename));
} catch (e) {
// doesn't matter
}
}
test("../../lib/appenders/dateFile", batch => {
batch.test("with default settings", t => {
const testFile = path.join(__dirname, "date-appender-default.log");
log4js.configure({
appenders: { date: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["date"], level: "DEBUG" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("This should be in the file.");
t.teardown(() => {
removeFile("date-appender-default.log");
});
setTimeout(() => {
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, "This should be in the file");
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
}, 100);
});
batch.test("configure with dateFileAppender", t => {
log4js.configure({
appenders: {
date: {
type: "dateFile",
filename: "test/tap/date-file-test.log",
pattern: "-yyyy-MM-dd",
layout: { type: "messagePassThrough" }
}
},
categories: { default: { appenders: ["date"], level: "WARN" } }
});
const logger = log4js.getLogger("tests");
logger.info("this should not be written to the file");
logger.warn("this should be written to the file");
log4js.shutdown(() => {
fs.readFile(
path.join(__dirname, "date-file-test.log"),
"utf8",
(err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(
contents.indexOf("this should not be written to the file"),
-1
);
t.end();
}
);
});
t.teardown(() => {
removeFile("date-file-test.log");
});
});
batch.test("configure with options.alwaysIncludePattern", t => {
const options = {
appenders: {
date: {
category: "tests",
type: "dateFile",
filename: "test/tap/date-file-test",
pattern: "yyyy-MM-dd.log",
alwaysIncludePattern: true,
layout: {
type: "messagePassThrough"
}
}
},
categories: { default: { appenders: ["date"], level: "debug" } }
};
const thisTime = format.asString(
options.appenders.date.pattern,
new Date()
);
const testFile = `date-file-test.${thisTime}`;
const existingFile = path.join(
__dirname,
testFile
);
fs.writeFileSync(existingFile, `this is existing data${EOL}`, "utf8");
log4js.configure(options);
const logger = log4js.getLogger("tests");
logger.warn("this should be written to the file with the appended date");
t.teardown(() => {
removeFile(testFile);
});
// wait for filesystem to catch up
log4js.shutdown(() => {
fs.readFile(existingFile, "utf8", (err, contents) => {
t.match(
contents,
"this is existing data",
"should not overwrite the file on open (issue #132)"
);
t.match(
contents,
"this should be written to the file with the appended date"
);
t.end();
});
});
});
batch.test("should flush logs on shutdown", t => {
const testFile = path.join(__dirname, "date-appender-flush.log");
log4js.configure({
appenders: { test: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("1");
logger.info("2");
logger.info("3");
t.teardown(() => {
removeFile("date-appender-flush.log");
});
log4js.shutdown(() => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test("should map maxLogSize to maxSize", t => {
const fakeStreamroller = {};
class DateRollingFileStream {
constructor(filename, pattern, options) {
fakeStreamroller.filename = filename;
fakeStreamroller.pattern = pattern;
fakeStreamroller.options = options;
}
on() { } // eslint-disable-line class-methods-use-this
}
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
const dateFileAppenderModule = sandbox.require(
"../../lib/appenders/dateFile",
{
requires: { streamroller: fakeStreamroller }
}
);
dateFileAppenderModule.configure(
{
filename: "cheese.log",
pattern: "yyyy",
maxLogSize: 100
},
{ basicLayout: () => {} }
);
t.equal(fakeStreamroller.options.maxSize, 100);
t.end();
});
batch.test("handling of writer.writable", t => {
const output = [];
let writable = true;
const DateRollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
on() { // eslint-disable-line class-methods-use-this
}
get writable() { // eslint-disable-line class-methods-use-this
return writable;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
requires: {
streamroller: {
DateRollingFileStream
}
}
});
const appender = dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout(loggingEvent) { return loggingEvent.data; } }
);
t.test("should log when writer.writable=true", assert => {
writable = true;
appender({data: "something to log"});
assert.ok(output.length, 1);
assert.match(output[output.length - 1], "something to log");
assert.end();
});
t.test("should not log when writer.writable=false", assert => {
writable = false;
appender({data: "this should not be logged"});
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], "this should not be logged");
assert.end();
});
t.end();
});
batch.test("when underlying stream errors", t => {
let consoleArgs;
let errorHandler;
const DateRollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === "error") {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
globals: {
console: {
error(...args) {
consoleArgs = args;
}
}
},
requires: {
streamroller: {
DateRollingFileStream
}
}
});
dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: "aargh" });
t.test("should log the error to console.error", assert => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
"log4js.dateFileAppender - Writing to file %s, error happened "
);
assert.equal(consoleArgs[1], "test1.log");
assert.equal(consoleArgs[2].error, "aargh");
assert.end();
});
t.end();
});
batch.end();
});
| /* eslint max-classes-per-file: ["error", 3] */
const { test } = require("tap");
const path = require("path");
const fs = require("fs");
const EOL = require("os").EOL || "\n";
const format = require("date-format");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
function removeFile(filename) {
try {
fs.unlinkSync(path.join(__dirname, filename));
} catch (e) {
// doesn't matter
}
}
test("../../lib/appenders/dateFile", batch => {
batch.test("with default settings", t => {
const testFile = path.join(__dirname, "date-appender-default.log");
log4js.configure({
appenders: { date: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["date"], level: "DEBUG" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("This should be in the file.");
t.teardown(() => {
removeFile("date-appender-default.log");
});
setTimeout(() => {
fs.readFile(testFile, "utf8", (err, contents) => {
t.match(contents, "This should be in the file");
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
}, 100);
});
batch.test("configure with dateFileAppender", t => {
log4js.configure({
appenders: {
date: {
type: "dateFile",
filename: "test/tap/date-file-test.log",
pattern: "-yyyy-MM-dd",
layout: { type: "messagePassThrough" }
}
},
categories: { default: { appenders: ["date"], level: "WARN" } }
});
const logger = log4js.getLogger("tests");
logger.info("this should not be written to the file");
logger.warn("this should be written to the file");
log4js.shutdown(() => {
fs.readFile(
path.join(__dirname, "date-file-test.log"),
"utf8",
(err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(
contents.indexOf("this should not be written to the file"),
-1
);
t.end();
}
);
});
t.teardown(() => {
removeFile("date-file-test.log");
});
});
batch.test("configure with options.alwaysIncludePattern", t => {
const options = {
appenders: {
date: {
category: "tests",
type: "dateFile",
filename: "test/tap/date-file-test",
pattern: "yyyy-MM-dd.log",
alwaysIncludePattern: true,
layout: {
type: "messagePassThrough"
}
}
},
categories: { default: { appenders: ["date"], level: "debug" } }
};
const thisTime = format.asString(
options.appenders.date.pattern,
new Date()
);
const testFile = `date-file-test.${thisTime}`;
const existingFile = path.join(
__dirname,
testFile
);
fs.writeFileSync(existingFile, `this is existing data${EOL}`, "utf8");
log4js.configure(options);
const logger = log4js.getLogger("tests");
logger.warn("this should be written to the file with the appended date");
t.teardown(() => {
removeFile(testFile);
});
// wait for filesystem to catch up
log4js.shutdown(() => {
fs.readFile(existingFile, "utf8", (err, contents) => {
t.match(
contents,
"this is existing data",
"should not overwrite the file on open (issue #132)"
);
t.match(
contents,
"this should be written to the file with the appended date"
);
t.end();
});
});
});
batch.test("should flush logs on shutdown", t => {
const testFile = path.join(__dirname, "date-appender-flush.log");
log4js.configure({
appenders: { test: { type: "dateFile", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("1");
logger.info("2");
logger.info("3");
t.teardown(() => {
removeFile("date-appender-flush.log");
});
log4js.shutdown(() => {
fs.readFile(testFile, "utf8", (err, fileContents) => {
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test("should map maxLogSize to maxSize", t => {
const fakeStreamroller = {};
class DateRollingFileStream {
constructor(filename, pattern, options) {
fakeStreamroller.filename = filename;
fakeStreamroller.pattern = pattern;
fakeStreamroller.options = options;
}
on() { } // eslint-disable-line class-methods-use-this
}
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
const dateFileAppenderModule = sandbox.require(
"../../lib/appenders/dateFile",
{
requires: { streamroller: fakeStreamroller }
}
);
dateFileAppenderModule.configure(
{
filename: "cheese.log",
pattern: "yyyy",
maxLogSize: 100
},
{ basicLayout: () => {} }
);
t.equal(fakeStreamroller.options.maxSize, 100);
t.end();
});
batch.test("handling of writer.writable", t => {
const output = [];
let writable = true;
const DateRollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
on() { // eslint-disable-line class-methods-use-this
}
get writable() { // eslint-disable-line class-methods-use-this
return writable;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
requires: {
streamroller: {
DateRollingFileStream
}
}
});
const appender = dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout(loggingEvent) { return loggingEvent.data; } }
);
t.test("should log when writer.writable=true", assert => {
writable = true;
appender({data: "something to log"});
assert.ok(output.length, 1);
assert.match(output[output.length - 1], "something to log");
assert.end();
});
t.test("should not log when writer.writable=false", assert => {
writable = false;
appender({data: "this should not be logged"});
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], "this should not be logged");
assert.end();
});
t.end();
});
batch.test("when underlying stream errors", t => {
let consoleArgs;
let errorHandler;
const DateRollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === "error") {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
globals: {
console: {
error(...args) {
consoleArgs = args;
}
}
},
requires: {
streamroller: {
DateRollingFileStream
}
}
});
dateFileAppender.configure(
{ filename: "test1.log", maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: "aargh" });
t.test("should log the error to console.error", assert => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
"log4js.dateFileAppender - Writing to file %s, error happened "
);
assert.equal(consoleArgs[1], "test1.log");
assert.equal(consoleArgs[2].error, "aargh");
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./examples/cluster.js | 'use strict';
const cluster = require('cluster');
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: { type: 'stdout' }
},
categories: { default: { appenders: ['out'], level: 'debug' } }
});
let logger;
if (cluster.isMaster) {
logger = log4js.getLogger('master');
cluster.fork();
logger.info('master is done', process.pid, new Error('flaps'));
} else {
logger = log4js.getLogger('worker');
logger.info("I'm a worker, with pid ", process.pid, new Error('pants'));
logger.info("I'm a worker, with pid ", process.pid, new Error());
logger.info('cluster.worker ', cluster.worker);
cluster.worker.disconnect();
}
| 'use strict';
const cluster = require('cluster');
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: { type: 'stdout' }
},
categories: { default: { appenders: ['out'], level: 'debug' } }
});
let logger;
if (cluster.isMaster) {
logger = log4js.getLogger('master');
cluster.fork();
logger.info('master is done', process.pid, new Error('flaps'));
} else {
logger = log4js.getLogger('worker');
logger.info("I'm a worker, with pid ", process.pid, new Error('pants'));
logger.info("I'm a worker, with pid ", process.pid, new Error());
logger.info('cluster.worker ', cluster.worker);
cluster.worker.disconnect();
}
| -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/tap/pause-test.js | const tap = require("tap");
const fs = require("fs");
const log4js = require("../../lib/log4js");
const removeFiles = async filenames => {
if (!Array.isArray(filenames))
filenames = [filenames];
const promises = filenames.map(filename => fs.promises.unlink(filename));
await Promise.allSettled(promises);
};
tap.test("Drain event test", batch => {
batch.test("Should emit pause event and resume when logging in a file with high frequency", t => {
t.teardown(async () => {
process.off("log4js:pause", process.listeners("log4js:pause")[process.listeners("log4js:pause").length - 1]);
await removeFiles("logs/drain.log");
});
// Generate logger with 5k of highWaterMark config
log4js.configure({
appenders: {
file: { type: "file", filename: "logs/drain.log", highWaterMark: 5 * 1024 }
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
});
let paused = false;
let resumed = false;
process.on("log4js:pause", value => {
if (value) {
paused = true;
t.ok(value, "log4js:pause, true");
} else {
resumed = true;
t.ok(!value, "log4js:pause, false");
t.end();
}
});
const logger = log4js.getLogger();
while (!paused && !resumed) {
if (!paused) {
logger.info("This is a test for emitting drain event");
}
}
});
batch.test("Should emit pause event and resume when logging in a date file with high frequency", (t) => {
t.teardown(async () => {
process.off("log4js:pause", process.listeners("log4js:pause")[process.listeners("log4js:pause").length - 1]);
await removeFiles("logs/date-file-drain.log");
});
// Generate date file logger with 5kb of highWaterMark config
log4js.configure({
appenders: {
file: { type: "dateFile", filename: "logs/date-file-drain.log", highWaterMark: 5 * 1024 }
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
});
let paused = false;
let resumed = false;
process.on("log4js:pause", value => {
if (value) {
paused = true;
t.ok(value, "log4js:pause, true");
} else {
resumed = true;
t.ok(!value, "log4js:pause, false");
t.end();
}
});
const logger = log4js.getLogger();
while (!paused && !resumed) {
if (!paused)
logger.info("This is a test for emitting drain event in date file logger");
}
});
batch.teardown(async () => {
try {
const files = fs.readdirSync("logs");
await removeFiles(files.map(filename => `logs/${filename}`));
fs.rmdirSync("logs");
} catch (e) {
// doesn't matter
}
});
batch.end();
});
| const tap = require("tap");
const fs = require("fs");
const log4js = require("../../lib/log4js");
const removeFiles = async filenames => {
if (!Array.isArray(filenames))
filenames = [filenames];
const promises = filenames.map(filename => fs.promises.unlink(filename));
await Promise.allSettled(promises);
};
tap.test("Drain event test", batch => {
batch.test("Should emit pause event and resume when logging in a file with high frequency", t => {
t.teardown(async () => {
process.off("log4js:pause", process.listeners("log4js:pause")[process.listeners("log4js:pause").length - 1]);
await removeFiles("logs/drain.log");
});
// Generate logger with 5k of highWaterMark config
log4js.configure({
appenders: {
file: { type: "file", filename: "logs/drain.log", highWaterMark: 5 * 1024 }
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
});
let paused = false;
let resumed = false;
process.on("log4js:pause", value => {
if (value) {
paused = true;
t.ok(value, "log4js:pause, true");
} else {
resumed = true;
t.ok(!value, "log4js:pause, false");
t.end();
}
});
const logger = log4js.getLogger();
while (!paused && !resumed) {
if (!paused) {
logger.info("This is a test for emitting drain event");
}
}
});
batch.test("Should emit pause event and resume when logging in a date file with high frequency", (t) => {
t.teardown(async () => {
process.off("log4js:pause", process.listeners("log4js:pause")[process.listeners("log4js:pause").length - 1]);
await removeFiles("logs/date-file-drain.log");
});
// Generate date file logger with 5kb of highWaterMark config
log4js.configure({
appenders: {
file: { type: "dateFile", filename: "logs/date-file-drain.log", highWaterMark: 5 * 1024 }
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
});
let paused = false;
let resumed = false;
process.on("log4js:pause", value => {
if (value) {
paused = true;
t.ok(value, "log4js:pause, true");
} else {
resumed = true;
t.ok(!value, "log4js:pause, false");
t.end();
}
});
const logger = log4js.getLogger();
while (!paused && !resumed) {
if (!paused)
logger.info("This is a test for emitting drain event in date file logger");
}
});
batch.teardown(async () => {
try {
const files = fs.readdirSync("logs");
await removeFiles(files.map(filename => `logs/${filename}`));
fs.rmdirSync("logs");
} catch (e) {
// doesn't matter
}
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,223 | refactor: replace deprecated String.prototype.substr() | [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | CommanderRoot | 2022-03-27T00:51:58Z | 2022-03-28T06:43:15Z | b8f16ccb5de645d19fa90f44081bffa287dd206c | 20607e888d6a73df5c332f121f51d9bd5f28a9d9 | refactor: replace deprecated String.prototype.substr(). [String.prototype.substr()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr) is deprecated so we replace it with [String.prototype.slice()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) which works similarily but isn't deprecated. While [String.prototype.substring()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) isn't deprecated I replaced it with `slice()` as well. `slice()` is generally a bit faster (and uses less bites as the name is shorter) and this way we don't have 2 functions which nearly do the same thing in the code.
.substr() probably isn't going away anytime soon but the change is trivial so it doesn't hurt to do it. | ./test/sandbox-coverage.js | const sandbox = require("@log4js-node/sandboxed-module");
sandbox.configure({
sourceTransformers: {
nyc(source) {
if (this.filename.indexOf("node_modules") > -1) {
return source;
}
const nyc = new (require("nyc"))({});
return nyc.instrumenter().instrumentSync(source, this.filename, { registerMap: () => {} });
}
}
});
| const sandbox = require("@log4js-node/sandboxed-module");
sandbox.configure({
sourceTransformers: {
nyc(source) {
if (this.filename.indexOf("node_modules") > -1) {
return source;
}
const nyc = new (require("nyc"))({});
return nyc.instrumenter().instrumentSync(source, this.filename, { registerMap: () => {} });
}
}
});
| -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.