Advanced usage patterns for @aws-lambda-powertools/logger.
import { Logger, LogFormatter, LogItem } from '@aws-lambda-powertools/logger';
import type { LogAttributes, UnformattedAttributes } from '@aws-lambda-powertools/logger/types';
class CustomFormatter extends LogFormatter {
public formatAttributes(
attributes: UnformattedAttributes,
additionalLogAttributes: LogAttributes
): LogItem {
const baseAttributes: LogAttributes = {
'@timestamp': this.formatTimestamp(attributes.timestamp),
'log.level': attributes.logLevel,
'message': attributes.message,
'service.name': attributes.serviceName
};
if (attributes.lambdaContext) {
baseAttributes['aws.function_name'] = attributes.lambdaContext.functionName;
baseAttributes['aws.request_id'] = attributes.lambdaContext.awsRequestId;
}
if (attributes.xRayTraceId) {
baseAttributes['aws.xray_trace_id'] = attributes.xRayTraceId;
}
return new LogItem({ attributes: baseAttributes })
.addAttributes(additionalLogAttributes);
}
}
const logger = new Logger({
serviceName: 'myService',
logFormatter: new CustomFormatter()
});
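
With CustomFormatter wired in, a log call emits the remapped keys; the expected shape is sketched below (values are illustrative, and the aws.* fields appear only after the Lambda context has been injected):

logger.info('Order received', { orderId: '123' });
// {
//   "@timestamp": "2024-01-01T12:00:00.000Z",
//   "log.level": "INFO",
//   "message": "Order received",
//   "service.name": "myService",
//   "aws.function_name": "order-service",
//   "aws.request_id": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
//   "orderId": "123"
// }
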
class ECSFormatter extends LogFormatter {
public formatAttributes(
attributes: UnformattedAttributes,
additionalLogAttributes: LogAttributes
): LogItem {
const ecsAttributes: LogAttributes = {
'@timestamp': this.formatTimestamp(attributes.timestamp),
'log.level': attributes.logLevel.toLowerCase(),
'message': attributes.message,
'ecs.version': '1.12.0',
'service.name': attributes.serviceName
};
if (attributes.lambdaContext) {
ecsAttributes['cloud.provider'] = 'aws';
ecsAttributes['cloud.service.name'] = 'lambda';
ecsAttributes['cloud.account.id'] = this.extractAccountId(
attributes.lambdaContext.invokedFunctionArn
);
ecsAttributes['cloud.region'] = process.env.AWS_REGION;
ecsAttributes['faas.name'] = attributes.lambdaContext.functionName;
ecsAttributes['faas.execution'] = attributes.lambdaContext.awsRequestId;
}
if (attributes.xRayTraceId) {
ecsAttributes['trace.id'] = attributes.xRayTraceId;
}
return new LogItem({ attributes: ecsAttributes })
.addAttributes(additionalLogAttributes);
}
private extractAccountId(arn?: string): string | undefined {
return arn?.split(':')[4];
}
}
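
The ECS formatter plugs in the same way as the custom formatter above; a minimal sketch (the ecsLogger name is illustrative, and the same pattern applies to the DatadogFormatter below):

const ecsLogger = new Logger({
  serviceName: 'myService',
  logFormatter: new ECSFormatter()
});
ecsLogger.info('Request handled'); // emitted with ECS field names
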
class DatadogFormatter extends LogFormatter {
public formatAttributes(
attributes: UnformattedAttributes,
additionalLogAttributes: LogAttributes
): LogItem {
const ddAttributes: LogAttributes = {
'timestamp': this.formatTimestamp(attributes.timestamp),
'status': this.mapLogLevel(attributes.logLevel),
'message': attributes.message,
'service': attributes.serviceName,
'ddsource': 'lambda'
};
if (attributes.lambdaContext) {
ddAttributes['lambda.arn'] = attributes.lambdaContext.invokedFunctionArn;
ddAttributes['lambda.request_id'] = attributes.lambdaContext.awsRequestId;
ddAttributes['lambda.function_name'] = attributes.lambdaContext.functionName;
}
if (attributes.xRayTraceId) {
const traceId = this.extractDatadogTraceId(attributes.xRayTraceId);
if (traceId) ddAttributes['dd.trace_id'] = traceId;
}
return new LogItem({ attributes: ddAttributes })
.addAttributes(additionalLogAttributes);
}
private mapLogLevel(level: string): string {
const map: Record<string, string> = {
'TRACE': 'debug', 'DEBUG': 'debug', 'INFO': 'info',
'WARN': 'warn', 'ERROR': 'error', 'CRITICAL': 'critical'
};
return map[level] || 'info';
}
private extractDatadogTraceId(xrayTraceId: string): string | undefined {
return xrayTraceId.match(/1-[a-f0-9]{8}-([a-f0-9]{24})/)?.[1];
}
}

class MyFormatter extends LogFormatter {
public formatAttributes(
attributes: UnformattedAttributes,
additionalLogAttributes: LogAttributes
): LogItem {
const baseAttributes: LogAttributes = {
timestamp: this.formatTimestamp(attributes.timestamp),
level: attributes.logLevel,
message: attributes.message
};
// Format errors using the built-in helpers; an Error passed to the logger
// arrives here in additionalLogAttributes under the `error` key
if (additionalLogAttributes.error instanceof Error) {
const error = additionalLogAttributes.error;
baseAttributes.error = this.formatError(error);
// Extract the code location from the stack trace
baseAttributes.location = this.getCodeLocation(error.stack);
delete additionalLogAttributes.error;
}
return new LogItem({ attributes: baseAttributes })
.addAttributes(additionalLogAttributes);
}
}
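
To see the error handling in action, the formatter can be wired to a logger and given an Error; a sketch (the errorLogger name is illustrative; field list per the built-in formatError helper):

const errorLogger = new Logger({
  serviceName: 'myService',
  logFormatter: new MyFormatter()
});

try {
  JSON.parse('not json');
} catch (err) {
  errorLogger.error('Parse failed', err as Error);
  // error    -> { name, message, location, stack } from formatError()
  // location -> top stack frame extracted by getCodeLocation()
}
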
class ConditionalFormatter extends LogFormatter {
constructor(private readonly environment: string) {
super();
}
public formatAttributes(
attributes: UnformattedAttributes,
additionalLogAttributes: LogAttributes
): LogItem {
const baseAttributes: LogAttributes = {
timestamp: this.formatTimestamp(attributes.timestamp),
level: attributes.logLevel,
message: attributes.message
};
// Production: minimal fields
if (this.environment === 'production') {
baseAttributes.request_id = attributes.lambdaContext?.awsRequestId;
} else {
// Development: verbose fields
baseAttributes.function_name = attributes.lambdaContext?.functionName;
baseAttributes.function_arn = attributes.lambdaContext?.invokedFunctionArn;
baseAttributes.cold_start = attributes.lambdaContext?.coldStart;
baseAttributes.xray_trace_id = attributes.xRayTraceId;
}
return new LogItem({ attributes: baseAttributes })
.addAttributes(additionalLogAttributes);
}
}
const logger = new Logger({
serviceName: 'myService',
logFormatter: new ConditionalFormatter(process.env.ENVIRONMENT || 'dev')
});

Buffer debug logs and flush only on errors:
const logger = new Logger({
serviceName: 'myService',
logBufferOptions: {
enabled: true,
maxBytes: 20480,
flushOnErrorLog: true // Auto-flush when logger.error() called
}
});
export const handler = async (event, context) => {
logger.debug('Starting');
logger.debug('Validating');
logger.debug('Processing');
try {
const result = await process(event);
logger.info('Success'); // Buffered debug logs are never emitted on the success path
return result;
} catch (error) {
logger.error('Failed', error as Error); // Flushes all buffered logs
throw error;
}
};

const logger = new Logger({
serviceName: 'myService',
logBufferOptions: {
enabled: true,
maxBytes: 20480,
bufferAtVerbosity: 'DEBUG' // Buffer DEBUG and more verbose (TRACE) logs; INFO and above are emitted immediately
}
});
export const handler = async (event, context) => {
logger.debug('Step 1');
logger.debug('Step 2');
try {
await process(event);
logger.clearBuffer(); // Discard buffered DEBUG logs
logger.info('Success');
} catch (error) {
logger.flushBuffer(); // Output all DEBUG logs for troubleshooting
logger.error('Failed', error as Error);
}
};

const logger = new Logger({
serviceName: 'myService',
logBufferOptions: {
enabled: process.env.ENABLE_LOG_BUFFERING === 'true',
maxBytes: 30720,
flushOnErrorLog: true
}
});

import middy from '@middy/core';
import { injectLambdaContext } from '@aws-lambda-powertools/logger/middleware';

const apiLogger = new Logger({ serviceName: 'api' });
const dbLogger = new Logger({ serviceName: 'database' });
const cacheLogger = new Logger({ serviceName: 'cache' });
export const handler = middy(async (event, context) => {
apiLogger.info('Request received');
dbLogger.info('Querying database');
const data = await db.query();
cacheLogger.info('Caching result');
await cache.set(key, data);
return { statusCode: 200, body: JSON.stringify(data) };
}).use(injectLambdaContext([apiLogger, dbLogger, cacheLogger], { resetKeys: true }));

const appLogger = new Logger({
serviceName: 'app',
logLevel: 'INFO',
persistentKeys: { version: '2.0.0', region: process.env.AWS_REGION }
});
const authLogger = appLogger.createChild({
serviceName: 'auth',
persistentKeys: { component: 'authentication' }
});
const validationLogger = appLogger.createChild({
serviceName: 'validation',
persistentKeys: { component: 'input-validation' }
});
// All child loggers inherit parent's version, region, and log level
authLogger.info('User authenticated'); // Includes version, region, component
validationLogger.warn('Invalid input'); // Includes version, region, component

const operationalLogger = new Logger({ serviceName: 'operations' });
const auditLogger = new Logger({
serviceName: 'audit',
persistentKeys: { logType: 'audit' },
logLevel: 'INFO' // Always log audit events
});
export const handler = async (event, context) => {
operationalLogger.debug('Processing request');
try {
const result = await processOrder(event);
// Audit critical business events
auditLogger.info('Order processed', {
orderId: result.id,
userId: event.userId,
amount: result.amount,
timestamp: new Date().toISOString()
});
operationalLogger.info('Success');
return result;
} catch (error) {
auditLogger.error('Order failed', {
orderId: event.orderId,
userId: event.userId,
error: (error as Error).message
});
throw error;
}
};

const logger = new Logger({ serviceName: 'myService' });
export const handler = async (event, context) => {
// Add request-scoped attributes
logger.appendKeys({
requestId: event.requestId,
userId: event.userId,
traceId: event.traceId
});
logger.info('Processing request');
try {
// Add operation-scoped attributes
logger.appendKeys({ operation: 'payment' });
await processPayment(event);
logger.removeKeys(['operation']);
// Add different operation scope
logger.appendKeys({ operation: 'notification' });
await sendNotification(event);
logger.removeKeys(['operation']);
logger.info('Request completed');
} finally {
// Clean up request-scoped attributes
logger.resetKeys();
}
};
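
Keys added with appendKeys() above are temporary and are removed by resetKeys(), while the persistent keys shown next survive it; a quick sketch of the difference (key names are illustrative, behavior as documented by Powertools):

logger.appendKeys({ requestId: 'req-1' });             // temporary
logger.appendPersistentKeys({ tenantId: 'tenant-1' }); // persistent
logger.resetKeys();
logger.info('After reset'); // includes tenantId, not requestId
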
const logger = new Logger({
serviceName: 'myService',
persistentKeys: { version: '1.0.0' }
});
// Add deployment-specific keys at runtime
logger.appendPersistentKeys({
deploymentId: process.env.DEPLOYMENT_ID,
region: process.env.AWS_REGION,
stage: process.env.STAGE
});
// Update version on hot swap
function updateVersion(newVersion: string): void {
logger.removePersistentKeys(['version']);
logger.appendPersistentKeys({ version: newVersion });
}

const logger = new Logger({
serviceName: 'myService',
jsonReplacerFn: (key: string, value: unknown) => {
// Note: Date values never reach the replacer as Date instances; JSON.stringify
// applies toJSON() before the replacer runs, so they arrive as ISO strings
// Handle Map objects
if (value instanceof Map) {
return { _type: 'Map', _value: Array.from(value.entries()) };
}
// Handle Set objects
if (value instanceof Set) {
return { _type: 'Set', _value: Array.from(value) };
}
// Handle BigInt (default handler also does this)
if (typeof value === 'bigint') {
return value.toString();
}
return value;
}
});
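
A sketch of what the replacer above produces for Map, Set, and BigInt values (the payload is illustrative):

logger.info('Cart snapshot', {
  items: new Map([['sku-1', 2]]),
  tags: new Set(['priority']),
  total: 1999n
});
// items -> { "_type": "Map", "_value": [["sku-1", 2]] }
// tags  -> { "_type": "Set", "_value": ["priority"] }
// total -> "1999"
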
const logger = new Logger({
serviceName: 'myService',
jsonReplacerFn: (key: string, value: unknown) => {
const sensitiveKeys = ['password', 'token', 'secret', 'apiKey', 'ssn', 'creditCard'];
if (sensitiveKeys.some(k => key.toLowerCase().includes(k.toLowerCase()))) {
return '[REDACTED]';
}
// Redact email addresses
if (typeof value === 'string' && /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(value)) {
return value.replace(/(?<=.{2}).*(?=@)/, '***');
}
return value;
}
});
logger.info('User data', {
userId: '123',
email: 'user@example.com', // Logged as: us***@example.com
password: 'secret123' // Logged as: [REDACTED]
});

Simple alternative to custom formatters when you only need to control key order:
const logger = new Logger({
serviceName: 'myService',
logRecordOrder: ['timestamp', 'level', 'message', 'service', 'requestId']
});
// Output keys will appear in specified order
logger.info('Message', { requestId: '123', userId: '456' });
// { timestamp: '...', level: 'INFO', message: 'Message', service: 'myService', requestId: '123', userId: '456' }

Note: logRecordOrder is mutually exclusive with logFormatter. Use logRecordOrder for simple key ordering and logFormatter for structural changes.
const logger = new Logger({
serviceName: 'myService',
sampleRateValue: 0.01 // 1% base sampling
});
export const handler = async (event, context) => {
logger.refreshSampleRateCalculation();
// Force DEBUG for specific users/conditions
if (event.userId === 'test-user' || event.headers?.['x-debug-mode'] === 'true') {
logger.setLogLevel('DEBUG');
}
logger.debug('Will log 1% of time, or 100% for test users');
logger.info('Always logged');
};

let invocationCount = 0;
const logger = new Logger({
serviceName: 'myService',
sampleRateValue: 0.1
});
export const handler = async (event, context) => {
invocationCount++;
// Recalculate sampling every 10 invocations for warm container
if (invocationCount % 10 === 0) {
logger.refreshSampleRateCalculation();
}
logger.debug('Sampled debug log');
logger.info('Normal log');
};

class AppError extends Error {
constructor(
message: string,
public code: string,
public statusCode: number,
public details?: unknown
) {
super(message);
this.name = 'AppError';
}
}
const logger = new Logger({ serviceName: 'myService' });
try {
throw new AppError('Payment failed', 'PAYMENT_ERROR', 402, { orderId: '123' });
} catch (error) {
if (error instanceof AppError) {
logger.error('Application error', {
error,
errorCode: error.code,
statusCode: error.statusCode,
details: error.details
});
} else {
logger.error('Unexpected error', error as Error);
}
}

const logger = new Logger({ serviceName: 'myService' });
async function processOrder(orderId: string): Promise<void> {
logger.appendKeys({ orderId, operation: 'processOrder' });
try {
await validateOrder(orderId);
await chargePayment(orderId);
await fulfillOrder(orderId);
} catch (error) {
// Error log includes orderId and operation from appendKeys
logger.error('Order processing failed', error as Error);
throw error;
} finally {
logger.removeKeys(['operation']);
}
}

const logger = new Logger({ serviceName: 'myService' });
export const handler = async (event, context) => {
// Only compute expensive data if DEBUG is enabled
if (logger.getLevelName() === 'DEBUG') {
const expensiveData = computeExpensiveMetrics();
logger.debug('Metrics', { metrics: expensiveData });
}
// Or check threshold
if (logger.level <= 8) { // DEBUG level threshold
logger.debug('Detailed info', computeDetails());
}
logger.info('Processing');
};

const logger = new Logger({ serviceName: 'myService' });
// Expensive serialization
const largeObject = { /* huge data */ };
// Only log the full payload when event logging is enabled (logEvent option / POWERTOOLS_LOGGER_LOG_EVENT)
if (logger.shouldLogEvent()) {
logger.info('Large data', { data: largeObject });
} else {
logger.info('Large data received', { size: JSON.stringify(largeObject).length });
}
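
Building on the level check above, a small helper can defer building expensive debug payloads entirely; a sketch (the debugLazy helper is illustrative, not part of the library):

const debugLazy = (message: string, build: () => Record<string, unknown>): void => {
  // Only pay the cost of building the payload when DEBUG output is enabled
  if (logger.getLevelName() === 'DEBUG') {
    logger.debug(message, build());
  }
};

debugLazy('Metrics', () => ({ metrics: computeExpensiveMetrics() }));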