I have several modules - let's say server.js, module1.js, ..., moduleN.js.
I would like to define the log file in my server.js:
winston.add(winston.transports.File, { filename: 'somefile.log' });
I am working with Winston 3.0.0 right now, and it seems the way to configure the default logger has changed a little bit. The following works for me:
log.js
// the settings for the global default logger
const winston = require('winston');

winston.configure({
  level: 'debug',
  format: winston.format.combine(
    winston.format.colorize(),
    winston.format.simple()
  ),
  transports: [
    new winston.transports.Console()
  ]
});
The rest stays the same. At the start of your application, require('./log') (and also require('winston')); in all other files, simply require('winston').
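For example, once log.js has been required, the configured default logger is available from the winston module itself in every file (a minimal sketch; the file names are placeholders):
// server.js
require('./log'); // applies winston.configure() to the default logger
const winston = require('winston');

winston.info('server starting');

// module1.js
const winston = require('winston');

winston.debug('something happened in module1');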
I use a factory function and pass in the module name so it can be added to the metadata:
logger-factory.js
const path = require('path');
const { createLogger, format, transports } = require('winston');

const { combine, errors, timestamp } = format;

const baseFormat = combine(
  timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
  errors({ stack: true }),
  format((info) => {
    info.level = info.level.toUpperCase();
    return info;
  })(),
);

const splunkFormat = combine(
  baseFormat,
  format.json(),
);

const prettyFormat = combine(
  baseFormat,
  format.prettyPrint(),
);

const createCustomLogger = (moduleName) => createLogger({
  level: process.env.LOG_LEVEL,
  format: process.env.PRETTY_LOGS ? prettyFormat : splunkFormat,
  defaultMeta: { module: path.basename(moduleName) },
  transports: [
    new transports.Console(),
  ],
});

module.exports = createCustomLogger;
app-harness.js (so I can run the exported index module)
const index = require('./index');

// https://docs.aws.amazon.com/lambda/latest/dg/with-s3.html
const sampleEvent = {
  "Records": [
    {
      "eventVersion": "2.1",
      "eventSource": "aws:s3",
      "awsRegion": "us-east-2",
      "eventTime": "2019-09-03T19:37:27.192Z",
      "eventName": "ObjectCreated:Put",
      "userIdentity": {
        "principalId": "AWS:AIDAINPONIXQXHT3IKHL2"
      },
      "requestParameters": {
        "sourceIPAddress": "205.255.255.255"
      },
      "responseElements": {
        "x-amz-request-id": "D82B88E5F771F645",
        "x-amz-id-2": "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
      },
      "s3": {
        "s3SchemaVersion": "1.0",
        "configurationId": "828aa6fc-f7b5-4305-8584-487c791949c1",
        "bucket": {
          "name": "lambda-artifacts-deafc19498e3f2df",
          "ownerIdentity": {
            "principalId": "A3I5XTEXAMAI3E"
          },
          "arn": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
        },
        "object": {
          "key": "b21b84d653bb07b05b1e6b33684dc11b",
          "size": 1305107,
          "eTag": "b21b84d653bb07b05b1e6b33684dc11b",
          "sequencer": "0C0F6F405D6ED209E1"
        }
      }
    }
  ]
};

index.handler(sampleEvent)
  .then(() => console.log('SUCCESS'))
  .catch((_) => console.log('FAILURE'));
index.js
const logger = require('./logger-factory')(__filename);
const app = require('./app');

exports.handler = async function (event) {
  try {
    logger.debug('lambda triggered with event', { event });
    await app.run(event);
    logger.debug(`lambda finished`);
  } catch (error) {
    logger.error('lambda failed: ', error);
    // rethrow the error up to AWS
    throw error;
  }
};
app.js
const logger = require('./logger-factory')(__filename);

const run = async (event) => {
  logger.info('processing S3 event', event);
  try {
    logger.info('reading s3 file');
    // throws because I used "Record" instead of "Records"
    const s3 = event.Record[0].s3;
    // use s3 to read the file
  } catch (error) {
    logger.error('failed to read from S3: ', error);
    throw error;
  }
};

module.exports = { run };
When I run the application locally at WARN level:
~/repos/ghe/lambda-logging (master * u=)> LOG_LEVEL=warn node -r dotenv/config ./src/app-harness.js
{
module: 'app.js',
level: 'ERROR',
message: "failed to read from S3: Cannot read property '0' of undefined",
stack: "TypeError: Cannot read property '0' of undefined\n" +
' at Object.run (/Users/jason.berk/repos/ghe/lambda-logging/src/app.js:8:28)\n' +
' at Object.exports.handler (/Users/jason.berk/repos/ghe/lambda-logging/src/index.js:7:15)\n' +
' at Object.<anonymous> (/Users/jason.berk/repos/ghe/lambda-logging/src/test-harness.js:44:7)\n' +
' at Module._compile (internal/modules/cjs/loader.js:1158:30)\n' +
' at Object.Module._extensions..js (internal/modules/cjs/loader.js:1178:10)\n' +
' at Module.load (internal/modules/cjs/loader.js:1002:32)\n' +
' at Function.Module._load (internal/modules/cjs/loader.js:901:14)\n' +
' at Function.executeUserEntryPoint [as runMain] (internal/modules/run_main.js:74:12)\n' +
' at internal/main/run_main_module.js:18:47',
timestamp: '2020-05-11 17:34:06'
}
{
module: 'index.js',
level: 'ERROR',
message: "lambda failed: Cannot read property '0' of undefined",
stack: "TypeError: Cannot read property '0' of undefined\n" +
' at Object.run (/Users/jason.berk/repos/ghe/lambda-logging/src/app.js:8:28)\n' +
' at Object.exports.handler (/Users/jason.berk/repos/ghe/lambda-logging/src/index.js:7:15)\n' +
' at Object.<anonymous> (/Users/jason.berk/repos/ghe/lambda-logging/src/test-harness.js:44:7)\n' +
' at Module._compile (internal/modules/cjs/loader.js:1158:30)\n' +
' at Object.Module._extensions..js (internal/modules/cjs/loader.js:1178:10)\n' +
' at Module.load (internal/modules/cjs/loader.js:1002:32)\n' +
' at Function.Module._load (internal/modules/cjs/loader.js:901:14)\n' +
' at Function.executeUserEntryPoint [as runMain] (internal/modules/run_main.js:74:12)\n' +
' at internal/main/run_main_module.js:18:47',
timestamp: '2020-05-11 17:34:06'
}
When I run at DEBUG level:
~/repos/ghe/lambda-logging (master * u=)> LOG_LEVEL=debug node -r dotenv/config ./src/test-harness.js
{
module: 'index.js',
event: {
Records: [
{
eventVersion: '2.1',
eventSource: 'aws:s3',
awsRegion: 'us-east-2',
eventTime: '2019-09-03T19:37:27.192Z',
eventName: 'ObjectCreated:Put',
userIdentity: { principalId: 'AWS:AIDAINPONIXQXHT3IKHL2' },
requestParameters: { sourceIPAddress: '205.255.255.255' },
responseElements: {
'x-amz-request-id': 'D82B88E5F771F645',
'x-amz-id-2': 'vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo='
},
s3: {
s3SchemaVersion: '1.0',
configurationId: '828aa6fc-f7b5-4305-8584-487c791949c1',
bucket: {
name: 'lambda-artifacts-deafc19498e3f2df',
ownerIdentity: { principalId: 'A3I5XTEXAMAI3E' },
arn: 'arn:aws:s3:::lambda-artifacts-deafc19498e3f2df'
},
object: {
key: 'b21b84d653bb07b05b1e6b33684dc11b',
size: 1305107,
eTag: 'b21b84d653bb07b05b1e6b33684dc11b',
sequencer: '0C0F6F405D6ED209E1'
}
}
}
]
},
level: 'DEBUG',
message: 'lambda triggered with event',
timestamp: '2020-05-11 17:38:21'
}
{
module: 'app.js',
Records: [
{
eventVersion: '2.1',
eventSource: 'aws:s3',
awsRegion: 'us-east-2',
eventTime: '2019-09-03T19:37:27.192Z',
eventName: 'ObjectCreated:Put',
userIdentity: { principalId: 'AWS:AIDAINPONIXQXHT3IKHL2' },
requestParameters: { sourceIPAddress: '205.255.255.255' },
responseElements: {
'x-amz-request-id': 'D82B88E5F771F645',
'x-amz-id-2': 'vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo='
},
s3: {
s3SchemaVersion: '1.0',
configurationId: '828aa6fc-f7b5-4305-8584-487c791949c1',
bucket: {
name: 'lambda-artifacts-deafc19498e3f2df',
ownerIdentity: { principalId: 'A3I5XTEXAMAI3E' },
arn: 'arn:aws:s3:::lambda-artifacts-deafc19498e3f2df'
},
object: {
key: 'b21b84d653bb07b05b1e6b33684dc11b',
size: 1305107,
eTag: 'b21b84d653bb07b05b1e6b33684dc11b',
sequencer: '0C0F6F405D6ED209E1'
}
}
}
],
level: 'INFO',
message: 'processing S3 event',
timestamp: '2020-05-11 17:38:21'
}
{
message: 'reading s3 file',
level: 'INFO',
module: 'app.js',
timestamp: '2020-05-11 17:38:21'
}
{
module: 'app.js',
level: 'ERROR',
message: "failed to read from S3: Cannot read property '0' of undefined",
stack: "TypeError: Cannot read property '0' of undefined\n" +
' at Object.run (/Users/jason.berk/repos/ghe/lambda-logging/src/app.js:8:28)\n' +
' at Object.exports.handler (/Users/jason.berk/repos/ghe/lambda-logging/src/index.js:7:15)\n' +
' at Object.<anonymous> (/Users/jason.berk/repos/ghe/lambda-logging/src/test-harness.js:44:7)\n' +
' at Module._compile (internal/modules/cjs/loader.js:1158:30)\n' +
' at Object.Module._extensions..js (internal/modules/cjs/loader.js:1178:10)\n' +
' at Module.load (internal/modules/cjs/loader.js:1002:32)\n' +
' at Function.Module._load (internal/modules/cjs/loader.js:901:14)\n' +
' at Function.executeUserEntryPoint [as runMain] (internal/modules/run_main.js:74:12)\n' +
' at internal/main/run_main_module.js:18:47',
timestamp: '2020-05-11 17:38:21'
}
{
module: 'index.js',
level: 'ERROR',
message: "lambda failed: Cannot read property '0' of undefined",
stack: "TypeError: Cannot read property '0' of undefined\n" +
' at Object.run (/Users/jason.berk/repos/ghe/lambda-logging/src/app.js:8:28)\n' +
' at Object.exports.handler (/Users/jason.berk/repos/ghe/lambda-logging/src/index.js:7:15)\n' +
' at Object.<anonymous> (/Users/jason.berk/repos/ghe/lambda-logging/src/test-harness.js:44:7)\n' +
' at Module._compile (internal/modules/cjs/loader.js:1158:30)\n' +
' at Object.Module._extensions..js (internal/modules/cjs/loader.js:1178:10)\n' +
' at Module.load (internal/modules/cjs/loader.js:1002:32)\n' +
' at Function.Module._load (internal/modules/cjs/loader.js:901:14)\n' +
' at Function.executeUserEntryPoint [as runMain] (internal/modules/run_main.js:74:12)\n' +
' at internal/main/run_main_module.js:18:47',
timestamp: '2020-05-11 17:38:21'
}
I create a new Winston logger in its own module (log.js) and require it from the other modules:
log.js
'use strict';

const winston = require('winston');

module.exports = new (winston.Logger)({
  transports: [
    new (winston.transports.Console)({
      level: 'info'
    })
  ]
});
a.js
const log = require('./log');
log.info("from a.js");
b.js
const log = require('./log');
log.info("from b.js");
Here is my logger configuration with Winston version 3.2.1. It stores logs in an application.log file; for error stack traces I use errors({ stack: true }) plus a small trick in the printf function to print the stack trace when an error is logged.
const { format, transports } = require('winston');
const { timestamp, colorize, printf, errors } = format;
const { Console, File } = transports;

const LoggerConfig = {
  level: process.env.LOGGER_LEVEL || 'debug',
  transports: [
    new Console(),
    new File({ filename: 'application.log' })
  ],
  format: format.combine(
    errors({ stack: true }),
    timestamp(),
    colorize(),
    printf(({ level, message, timestamp, stack }) => {
      if (stack) {
        // print the stack trace as part of the log line
        return `${timestamp} ${level}: ${message} - ${stack}`;
      }
      return `${timestamp} ${level}: ${message}`;
    }),
  ),
  expressFormat: true, // use the default Express/morgan request formatting (express-winston option)
  colorize: false,     // color the text and status code using the Express/morgan palette (express-winston option)
  ignoreRoute: function (req, res) {
    return false;
  } // optional: allows skipping some log messages based on the request and/or response
};
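To see the printf stack-trace branch in action, pass an Error object to a logger built from this configuration; errors({ stack: true }) copies the stack onto the log info, so the ${stack} branch is taken. A small sketch (not part of the original setup):
const winston = require('winston');

const logger = winston.createLogger(LoggerConfig);

try {
  JSON.parse('not valid json');
} catch (error) {
  // errors({ stack: true }) copies error.stack onto the log info,
  // so this prints `${timestamp} ${level}: ${message} - ${stack}`
  logger.error(error);
}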
I am using this same configuration for express-winston and for general logging. I declared the __logger object globally so that you don't need to import it in every file. Node.js's own pseudo-globals such as __dirname and __filename use a double-underscore prefix (__), so it is reasonable to follow that convention.
Server.js
const express = require('express');
const winston = require('winston');
const expressWinston = require('express-winston');

const app = express();

/**
 * winston.Logger
 * logger for explicit log messages, used like console.log
 */
global.__logger = winston.createLogger(LoggerConfig);

/**
 * logger for every HTTP request that comes into the app
 */
app.use(expressWinston.logger(LoggerConfig));
__logger is global, so you can use it anywhere, for example:
blog.controller.js
function save(req, res) {
  try {
    __logger.debug('Blog add operation');
    // ...
    return res.send(blog);
  } catch (error) {
    __logger.error(error);
    return res.status(500).send(error);
  }
}
Hope this helps!
Just create logger.js and put the following in it:
const winston = require('winston');

const logger = winston.createLogger({
  level: 'info',
  format: winston.format.combine(
    winston.format.colorize(),
    winston.format.simple()
  ),
  transports: [
    new winston.transports.Console()
  ]
});

module.exports = logger;
Then you can require and use it anywhere, since the logger is now a singleton (Node caches modules, so every require returns the same instance).
const logger = require('./utils/logger');
logger.info('Hello!');
This even gives you the option to swap out the logging library if needed. The accepted answer is totally wrong and takes you one step closer to spaghetti code.