The zapcore package provides the low-level interfaces and implementations that power zap's logging system. It enables extending zap with custom encoders, write syncers, cores, and other components while maintaining high performance.
import "go.uber.org/zap/zapcore"The zapcore package is organized around several key concepts:
The Core interface is the heart of zap's logging system. All logger operations ultimately delegate to a Core implementation.
type Core interface {
// Check if level is enabled for this core
Enabled(Level) bool
// Create child core with additional context fields
With([]Field) Core
// Check level and prepare entry for writing
Check(Entry, *CheckedEntry) *CheckedEntry
// Write log entry with fields
Write(Entry, []Field) error
// Flush buffered logs
Sync() error
}

// Create standard core from encoder, syncer, and level enabler
func NewCore(enc Encoder, ws WriteSyncer, enab LevelEnabler) Core
// Create no-op core that discards all logs
func NewNopCore() Core

import (
"os"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
// Create a simple core with JSON encoding
encoder := zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig())
writer := zapcore.AddSync(os.Stdout)
core := zapcore.NewCore(encoder, writer, zapcore.InfoLevel)
// Use core to create logger
logger := zap.New(core)
defer logger.Sync()
logger.Info("logging via custom core",
zap.String("component", "core-example"),
)

The standard core combines an encoder, a write syncer, and a level enabler.
// Production-ready core with JSON encoding
func createProductionCore() zapcore.Core {
encoderConfig := zap.NewProductionEncoderConfig()
encoder := zapcore.NewJSONEncoder(encoderConfig)
file, _ := os.OpenFile("app.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
writer := zapcore.AddSync(file)
return zapcore.NewCore(encoder, writer, zapcore.InfoLevel)
}

// Duplicate writes to multiple cores
func NewTee(cores ...Core) Core

Write logs to multiple destinations simultaneously:
func createMultiOutputLogger() *zap.Logger {
// Console output with colored, human-friendly format
consoleConfig := zap.NewDevelopmentEncoderConfig()
consoleConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
consoleEncoder := zapcore.NewConsoleEncoder(consoleConfig)
consoleCore := zapcore.NewCore(
consoleEncoder,
zapcore.AddSync(os.Stdout),
zapcore.DebugLevel,
)
// File output with JSON format
fileConfig := zap.NewProductionEncoderConfig()
fileEncoder := zapcore.NewJSONEncoder(fileConfig)
file, _ := os.OpenFile("app.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
fileCore := zapcore.NewCore(
fileEncoder,
zapcore.AddSync(file),
zapcore.InfoLevel,
)
// Combine both cores
tee := zapcore.NewTee(consoleCore, fileCore)
return zap.New(tee)
}

// Create sampling core with basic configuration
func NewSampler(core Core, tick time.Duration, first, thereafter int) Core
// Create sampling core with additional options
func NewSamplerWithOptions(
core Core,
tick time.Duration,
first int,
thereafter int,
opts ...SamplerOption,
) Core
// Sampler options
func SamplerHook(hook func(Entry, SamplingDecision)) SamplerOption
func SamplerTick(tick time.Duration) SamplerOption
func SamplerFirst(first int) SamplerOption
func SamplerThereafter(thereafter int) SamplerOption

Sampling reduces log volume by only writing a subset of logs:
import "time"
func createSamplingLogger() *zap.Logger {
core := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.InfoLevel,
)
// Per one-second tick: log the first 10 entries with a given message, then every 100th
sampler := zapcore.NewSampler(core, time.Second, 10, 100)
return zap.New(sampler)
}
// With sampling hook to track dropped logs
func createSamplerWithHook() *zap.Logger {
core := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.InfoLevel,
)
hook := func(entry zapcore.Entry, decision zapcore.SamplingDecision) {
if decision == zapcore.LogDropped {
// Track dropped log metrics
fmt.Printf("Dropped log: %s\n", entry.Message)
}
}
sampler := zapcore.NewSamplerWithOptions(
core,
time.Second,
10, // first
100, // thereafter
zapcore.SamplerHook(hook),
)
return zap.New(sampler)
}

// Create core with minimum level floor
func NewIncreaseLevelCore(core Core, lvl LevelEnabler) (Core, error)

Raise the minimum logging level of an existing core:
func createLevelFilteredLogger() *zap.Logger {
baseCore := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.DebugLevel, // Base accepts all levels
)
// Only allow ErrorLevel and above
core, _ := zapcore.NewIncreaseLevelCore(baseCore, zapcore.ErrorLevel)
return zap.New(core)
}

// Create core that defers adding the context fields until the core is first used
func NewLazyWith(core Core, fields []Field) Core

Defer encoding of context fields until the logger actually writes:
func createLazyLogger() *zap.Logger {
core := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.InfoLevel,
)
// Field values are captured eagerly here; encoding them into the
// underlying core is deferred until the logger first writes an entry
lazyCore := zapcore.NewLazyWith(core, []zapcore.Field{
zap.Time("start_time", time.Now()),
zap.String("hostname", getHostname()),
})
return zap.New(lazyCore)
}
func getHostname() string {
hostname, _ := os.Hostname()
return hostname
}

// Register hooks called after each log entry
func RegisterHooks(core Core, hooks ...func(Entry) error) Core

Execute custom logic after each log entry:
func createCoreWithHooks() *zap.Logger {
baseCore := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.InfoLevel,
)
errorCountHook := func(entry zapcore.Entry) error {
if entry.Level >= zapcore.ErrorLevel {
// Increment error counter metric
errorCounter.Inc()
}
return nil
}
alertHook := func(entry zapcore.Entry) error {
if entry.Level >= zapcore.ErrorLevel {
// Send alert to monitoring system
sendAlert(entry)
}
return nil
}
coreWithHooks := zapcore.RegisterHooks(baseCore, errorCountHook, alertHook)
return zap.New(coreWithHooks)
}

The Encoder interface formats log entries into bytes for output.
type Encoder interface {
ObjectEncoder
// Create deep copy of encoder
Clone() Encoder
// Encode complete entry to buffer
EncodeEntry(Entry, []Field) (*buffer.Buffer, error)
}

type ObjectEncoder interface {
// Complex types
AddArray(key string, marshaler ArrayMarshaler) error
AddObject(key string, marshaler ObjectMarshaler) error
// Primitive types
AddBinary(key string, value []byte)
AddBool(key string, value bool)
AddByteString(key string, value []byte)
AddComplex128(key string, value complex128)
AddComplex64(key string, value complex64)
AddDuration(key string, value time.Duration)
AddFloat64(key string, value float64)
AddFloat32(key string, value float32)
AddInt(key string, value int)
AddInt64(key string, value int64)
AddInt32(key string, value int32)
AddInt16(key string, value int16)
AddInt8(key string, value int8)
AddString(key, value string)
AddTime(key string, value time.Time)
AddUint(key string, value uint)
AddUint64(key string, value uint64)
AddUint32(key string, value uint32)
AddUint16(key string, value uint16)
AddUint8(key string, value uint8)
AddUintptr(key string, value uintptr)
AddReflected(key string, value interface{}) error
// Namespace for nested fields
OpenNamespace(key string)
}

type ArrayEncoder interface {
PrimitiveArrayEncoder
// Complex array elements
AppendArray(ArrayMarshaler) error
AppendObject(ObjectMarshaler) error
AppendReflected(value interface{}) error
}
type PrimitiveArrayEncoder interface {
AppendBool(bool)
AppendByteString([]byte)
AppendComplex128(complex128)
AppendComplex64(complex64)
AppendFloat64(float64)
AppendFloat32(float32)
AppendInt(int)
AppendInt64(int64)
AppendInt32(int32)
AppendInt16(int16)
AppendInt8(int8)
AppendString(string)
AppendUint(uint)
AppendUint64(uint64)
AppendUint32(uint32)
AppendUint16(uint16)
AppendUint8(uint8)
AppendUintptr(uintptr)
}

// Create JSON encoder
func NewJSONEncoder(cfg EncoderConfig) Encoder
// Create human-friendly console encoder
func NewConsoleEncoder(cfg EncoderConfig) Encoder

type EncoderConfig struct {
// Keys for log entry components (use OmitKey to omit)
MessageKey string
LevelKey string
TimeKey string
NameKey string
CallerKey string
FunctionKey string
StacktraceKey string
// Line ending configuration
SkipLineEnding bool
LineEnding string
// Encoding functions for entry components
EncodeLevel LevelEncoder
EncodeTime TimeEncoder
EncodeDuration DurationEncoder
EncodeCaller CallerEncoder
EncodeName NameEncoder
// Reflection encoder factory
NewReflectedEncoder func(io.Writer) ReflectedEncoder
// Console encoder separator
ConsoleSeparator string
}

Constants for encoder configuration:
const (
// Use empty string to omit a key from output
OmitKey = ""
// Default line ending
DefaultLineEnding = "\n"
)

func createCustomEncoder() zapcore.Encoder {
config := zapcore.EncoderConfig{
MessageKey: "msg",
LevelKey: "level",
TimeKey: "ts",
NameKey: "logger",
CallerKey: "caller",
FunctionKey: "func",
StacktraceKey: "stacktrace",
LineEnding: zapcore.DefaultLineEnding,
EncodeLevel: zapcore.CapitalColorLevelEncoder,
EncodeTime: zapcore.ISO8601TimeEncoder,
EncodeDuration: zapcore.StringDurationEncoder,
EncodeCaller: zapcore.ShortCallerEncoder,
EncodeName: zapcore.FullNameEncoder,
}
return zapcore.NewConsoleEncoder(config)
}
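A related knob: the ConsoleSeparator field controls the delimiter the console encoder places between entry components (a tab by default). A minimal sketch; the function name is illustrative:

// Console output separated by " | " instead of the default tab
func createPipeSeparatedEncoder() zapcore.Encoder {
    config := zap.NewDevelopmentEncoderConfig()
    config.ConsoleSeparator = " | "
    return zapcore.NewConsoleEncoder(config)
}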
// Minimal JSON encoder (omit timestamp and caller)
func createMinimalJSONEncoder() zapcore.Encoder {
config := zap.NewProductionEncoderConfig()
config.TimeKey = zapcore.OmitKey // Omit timestamp
config.CallerKey = zapcore.OmitKey // Omit caller
return zapcore.NewJSONEncoder(config)
}

type LevelEncoder func(Level, PrimitiveArrayEncoder)

// Built-in level encoders (functions matching the LevelEncoder signature)
func LowercaseLevelEncoder(Level, PrimitiveArrayEncoder)      // "debug", "info"
func LowercaseColorLevelEncoder(Level, PrimitiveArrayEncoder) // Colored lowercase
func CapitalLevelEncoder(Level, PrimitiveArrayEncoder)        // "DEBUG", "INFO"
func CapitalColorLevelEncoder(Level, PrimitiveArrayEncoder)   // Colored uppercase

type TimeEncoder func(time.Time, PrimitiveArrayEncoder)
// Built-in time encoders (functions matching the TimeEncoder signature)
func EpochTimeEncoder(time.Time, PrimitiveArrayEncoder)       // Seconds since Unix epoch
func EpochMillisTimeEncoder(time.Time, PrimitiveArrayEncoder) // Milliseconds since epoch
func EpochNanosTimeEncoder(time.Time, PrimitiveArrayEncoder)  // Nanoseconds since epoch
func ISO8601TimeEncoder(time.Time, PrimitiveArrayEncoder)     // ISO8601 format
func RFC3339TimeEncoder(time.Time, PrimitiveArrayEncoder)     // RFC3339 format
func RFC3339NanoTimeEncoder(time.Time, PrimitiveArrayEncoder) // RFC3339 with nanoseconds
// Create custom time encoder with layout
func TimeEncoderOfLayout(layout string) TimeEncoder

Example of custom time encoding:
// Custom time encoder with specific format
config := zap.NewProductionEncoderConfig()
config.EncodeTime = zapcore.TimeEncoderOfLayout("2006-01-02 15:04:05")
// Or use RFC3339 with nanoseconds
config.EncodeTime = zapcore.RFC3339NanoTimeEncoder

type DurationEncoder func(time.Duration, PrimitiveArrayEncoder)

// Built-in duration encoders (functions matching the DurationEncoder signature)
func SecondsDurationEncoder(time.Duration, PrimitiveArrayEncoder) // Floating-point seconds
func NanosDurationEncoder(time.Duration, PrimitiveArrayEncoder)   // Integer nanoseconds
func MillisDurationEncoder(time.Duration, PrimitiveArrayEncoder)  // Integer milliseconds
func StringDurationEncoder(time.Duration, PrimitiveArrayEncoder)  // String representation, e.g. "1.5s"

type CallerEncoder func(EntryCaller, PrimitiveArrayEncoder)
// Built-in caller encoders (functions matching the CallerEncoder signature)
func FullCallerEncoder(EntryCaller, PrimitiveArrayEncoder)  // Full path with line number
func ShortCallerEncoder(EntryCaller, PrimitiveArrayEncoder) // package/file:line

type NameEncoder func(string, PrimitiveArrayEncoder)
// Built-in name encoder (function matching the NameEncoder signature)
func FullNameEncoder(string, PrimitiveArrayEncoder) // Full logger name

import (
"encoding/json"
"go.uber.org/zap/buffer"
"go.uber.org/zap/zapcore"
)
// Custom encoder that outputs logs as YAML
type yamlEncoder struct {
zapcore.Encoder
config zapcore.EncoderConfig
}
func NewYAMLEncoder(config zapcore.EncoderConfig) zapcore.Encoder {
return &yamlEncoder{
Encoder: zapcore.NewJSONEncoder(config),
config: config,
}
}
func (enc *yamlEncoder) Clone() zapcore.Encoder {
return &yamlEncoder{
Encoder: enc.Encoder.Clone(),
config: enc.config,
}
}
func (enc *yamlEncoder) EncodeEntry(entry zapcore.Entry, fields []zapcore.Field) (*buffer.Buffer, error) {
// Build a map of all fields
data := make(map[string]interface{})
if enc.config.MessageKey != "" {
data[enc.config.MessageKey] = entry.Message
}
if enc.config.LevelKey != "" {
data[enc.config.LevelKey] = entry.Level.String()
}
if enc.config.TimeKey != "" {
data[enc.config.TimeKey] = entry.Time.Format("2006-01-02T15:04:05.000Z")
}
// Add fields (simplified - real implementation would handle all field types)
for _, field := range fields {
data[field.Key] = field.Interface
}
// Convert to YAML (simplified - would use yaml library in practice)
buf := buffer.NewPool().Get()
buf.AppendString("---\n")
for k, v := range data {
buf.AppendString(k)
buf.AppendString(": ")
buf.AppendString(fmt.Sprintf("%v", v))
buf.AppendString("\n")
}
return buf, nil
}
// Register custom encoder
func init() {
zap.RegisterEncoder("yaml", func(config zapcore.EncoderConfig) (zapcore.Encoder, error) {
return NewYAMLEncoder(config), nil
})
}

type WriteSyncer interface {
// Write bytes to output
Write([]byte) (int, error)
// Flush to underlying storage
Sync() error
}

// Wrap io.Writer as a WriteSyncer (adds a no-op Sync if the writer lacks one)
func AddSync(w io.Writer) WriteSyncer
// Deprecated: use AddSync
func NewIOSync(w io.Writer) WriteSyncer
// Make WriteSyncer thread-safe with mutex
func Lock(ws WriteSyncer) WriteSyncer
// Fan out writes to multiple syncers
func NewMultiWriteSyncer(ws ...WriteSyncer) WriteSyncer

// Buffered write syncer: wraps a WriteSyncer and flushes in the background.
// Configure it via its exported fields; there is no separate constructor.
type BufferedWriteSyncer struct {
    WS            WriteSyncer   // WriteSyncer to buffer (required)
    Size          int           // Buffer size; defaults to 256 kB
    FlushInterval time.Duration // Flush frequency; defaults to 30 seconds
    Clock         Clock         // Clock override, mainly for tests
    // ... unexported fields
}
// Stop background flushing and flush remaining buffered data
func (s *BufferedWriteSyncer) Stop() error

// Multiple outputs with fanout
func createMultiWriter() zapcore.WriteSyncer {
file1, _ := os.OpenFile("app.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
file2, _ := os.OpenFile("app-backup.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
return zapcore.NewMultiWriteSyncer(
zapcore.AddSync(file1),
zapcore.AddSync(file2),
zapcore.AddSync(os.Stdout),
)
}
// Thread-safe writer
func createLockedWriter() zapcore.WriteSyncer {
file, _ := os.OpenFile("app.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
return zapcore.Lock(zapcore.AddSync(file))
}
// Buffered writer for performance
func createBufferedWriter() *zapcore.BufferedWriteSyncer {
    file, _ := os.OpenFile("app.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
    return &zapcore.BufferedWriteSyncer{
        WS:            zapcore.AddSync(file),
        Size:          256 * 1024,       // 256KB buffer
        FlushInterval: 30 * time.Second, // Flush at least every 30s
    }
}

type Level int8
const (
DebugLevel Level = -1 // Debug logs (verbose)
InfoLevel Level = 0 // Info logs (default)
WarnLevel Level = 1 // Warning logs
ErrorLevel Level = 2 // Error logs
DPanicLevel Level = 3 // Panic in development
PanicLevel Level = 4 // Panic always
FatalLevel Level = 5 // Fatal (exits)
InvalidLevel Level = 6 // Invalid level marker
)
// Level methods
func (l Level) String() string
func (l Level) CapitalString() string
func (l Level) Enabled(lvl Level) bool
func (l Level) MarshalText() ([]byte, error)
func (l *Level) UnmarshalText(text []byte) error
func (l *Level) Set(s string) error
// Parse level from string
func ParseLevel(text string) (Level, error)

type LevelEnabler interface {
Enabled(Level) bool
}
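Any Level value is itself a LevelEnabler (it enables that level and above), and the main zap package's LevelEnablerFunc adapts an ordinary function; the multi-output example later in this section relies on both. A minimal sketch, with an illustrative function name:

func createSplitOutputCore() zapcore.Core {
    encoder := zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig())
    // A Level is a LevelEnabler: it enables that level and above
    infoAndAbove := zapcore.InfoLevel
    // zap.LevelEnablerFunc adapts a plain function to the LevelEnabler interface
    warnOnly := zap.LevelEnablerFunc(func(lvl zapcore.Level) bool {
        return lvl == zapcore.WarnLevel
    })
    return zapcore.NewTee(
        zapcore.NewCore(encoder, zapcore.AddSync(os.Stdout), infoAndAbove),
        zapcore.NewCore(encoder, zapcore.AddSync(os.Stderr), warnOnly),
    )
}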
// Extract Level from LevelEnabler
func LevelOf(enab LevelEnabler) Level

// Dynamic level based on time of day
type timeBasedLevel struct {
debugHours [2]int // Debug during these hours
}
func (t *timeBasedLevel) Enabled(lvl zapcore.Level) bool {
hour := time.Now().Hour()
// Enable debug logs during specified hours
if lvl == zapcore.DebugLevel {
return hour >= t.debugHours[0] && hour < t.debugHours[1]
}
// Enable all other levels
return lvl >= zapcore.InfoLevel
}
func createTimeBasedLogger() *zap.Logger {
levelEnabler := &timeBasedLevel{
debugHours: [2]int{9, 17}, // Debug logs 9am-5pm
}
core := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
levelEnabler,
)
return zap.New(core)
}

type Entry struct {
Level Level // Log level
Time time.Time // Timestamp
LoggerName string // Logger name
Message string // Log message
Caller EntryCaller // Caller information
Stack string // Stack trace
}

type EntryCaller struct {
Defined bool // Whether caller info available
PC uintptr // Program counter
File string // Source file path
Line int // Line number
Function string // Function name
}
// Caller formatting methods
func (ec EntryCaller) String() string      // Same as FullPath
func (ec EntryCaller) FullPath() string    // Full /path/to/file.go:line
func (ec EntryCaller) TrimmedPath() string // Trimmed package/file.go:line

type CheckedEntry struct {
Entry
// ... unexported fields
}
// Write entry with fields
func (ce *CheckedEntry) Write(fields ...Field)
// Add core to write to
func (ce *CheckedEntry) AddCore(ent Entry, core Core) *CheckedEntry
// Add post-write hook
func (ce *CheckedEntry) After(ent Entry, hook CheckWriteHook) *CheckedEntry
// Deprecated: use After
func (ce *CheckedEntry) Should(ent Entry, should CheckWriteAction) *CheckedEntry

type CheckWriteHook interface {
OnWrite(*CheckedEntry, []Field)
}
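Custom hooks can change what happens after an entry is written. As a minimal sketch (the type and function names are illustrative, and zap.WithFatalHook assumes a reasonably recent zap release), a hook can replace the default exit-on-Fatal behavior:

// Hook invoked after a Fatal entry is written, instead of calling os.Exit
type panicOnFatal struct{}

func (panicOnFatal) OnWrite(ce *zapcore.CheckedEntry, fields []zapcore.Field) {
    panic(ce.Message)
}

func createLoggerWithFatalHook(core zapcore.Core) *zap.Logger {
    // zap.WithFatalHook overrides the behavior that follows Fatal-level writes
    return zap.New(core, zap.WithFatalHook(panicOnFatal{}))
}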
type CheckWriteAction uint8
const (
WriteThenNoop CheckWriteAction = 0
WriteThenGoexit CheckWriteAction = 1
WriteThenPanic CheckWriteAction = 2
WriteThenFatal CheckWriteAction = 3
)
// CheckWriteAction implements CheckWriteHook
func (a CheckWriteAction) OnWrite(ce *CheckedEntry, fields []Field)

type Field struct {
Key string
Type FieldType
Integer int64
String string
Interface interface{}
}
// Add field to encoder
func (f Field) AddTo(enc ObjectEncoder) error
// Compare fields
func (f Field) Equals(other Field) bool

type FieldType uint8
const (
UnknownType FieldType = 0
// Complex types
ArrayMarshalerType FieldType = 1
ObjectMarshalerType FieldType = 2
InlineMarshalerType FieldType = 28
// Primitive types
BinaryType FieldType = 3
BoolType FieldType = 4
ByteStringType FieldType = 5
Complex128Type FieldType = 6
Complex64Type FieldType = 7
DurationType FieldType = 8
Float64Type FieldType = 9
Float32Type FieldType = 10
Int64Type FieldType = 11
Int32Type FieldType = 12
Int16Type FieldType = 13
Int8Type FieldType = 14
StringType FieldType = 15
TimeType FieldType = 16
TimeFullType FieldType = 17
Uint64Type FieldType = 18
Uint32Type FieldType = 19
Uint16Type FieldType = 20
Uint8Type FieldType = 21
UintptrType FieldType = 22
// Special types
ReflectType FieldType = 23
NamespaceType FieldType = 24
StringerType FieldType = 25
ErrorType FieldType = 26
SkipType FieldType = 27
)

// Custom object serialization
type ObjectMarshaler interface {
MarshalLogObject(ObjectEncoder) error
}
// Custom array serialization
type ArrayMarshaler interface {
MarshalLogArray(ArrayEncoder) error
}

type User struct {
ID int
Username string
Email string
}
func (u User) MarshalLogObject(enc zapcore.ObjectEncoder) error {
enc.AddInt("id", u.ID)
enc.AddString("username", u.Username)
enc.AddString("email", u.Email)
return nil
}
// Usage
logger.Info("user action",
zap.Object("user", User{
ID: 123,
Username: "alice",
Email: "alice@example.com",
}),
)

type Users []User
func (users Users) MarshalLogArray(enc zapcore.ArrayEncoder) error {
for _, user := range users {
    if err := enc.AppendObject(user); err != nil {
        return err
    }
}
return nil
}
// Usage
logger.Info("batch operation",
zap.Array("users", Users{
{ID: 1, Username: "alice"},
{ID: 2, Username: "bob"},
}),
)

type Clock interface {
    // Current time
    Now() time.Time
    // Ticker used by components that flush on an interval
    NewTicker(time.Duration) *time.Ticker
}
// Default system clock
var DefaultClock Clock

// Clock that returns fixed time for testing
type fixedClock struct {
t time.Time
}
func (c *fixedClock) Now() time.Time {
    return c.t
}
func (c *fixedClock) NewTicker(d time.Duration) *time.Ticker {
    // A real ticker is fine here; only Now is fixed
    return time.NewTicker(d)
}
func createLoggerWithFixedTime() *zap.Logger {
core := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.InfoLevel,
)
clock := &fixedClock{t: time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC)}
return zap.New(core, zap.WithClock(clock))
}

type SamplingDecision uint32

const (
    LogDropped SamplingDecision = 1 << iota // Entry was dropped by the sampler
    LogSampled                              // Entry was sampled (written)
)
type SamplerOption interface {
// ... unexported methods
}
// Sampling hook called for each sampling decision
func SamplerHook(hook func(Entry, SamplingDecision)) SamplerOption
// Override sampling tick duration
func SamplerTick(tick time.Duration) SamplerOption
// Override first count
func SamplerFirst(first int) SamplerOption
// Override thereafter count
func SamplerThereafter(thereafter int) SamplerOption

Route different log levels to different destinations by combining level-filtered cores with NewTee:

func createMultiLevelLogger() *zap.Logger {
// Debug and Info to stdout
lowPriorityConfig := zap.NewDevelopmentEncoderConfig()
lowPriorityCore := zapcore.NewCore(
zapcore.NewConsoleEncoder(lowPriorityConfig),
zapcore.AddSync(os.Stdout),
zap.LevelEnablerFunc(func(lvl zapcore.Level) bool {
return lvl >= zapcore.DebugLevel && lvl < zapcore.WarnLevel
}),
)
// Warn and Error to stderr
highPriorityConfig := zap.NewProductionEncoderConfig()
highPriorityCore := zapcore.NewCore(
zapcore.NewJSONEncoder(highPriorityConfig),
zapcore.AddSync(os.Stderr),
zap.LevelEnablerFunc(func(lvl zapcore.Level) bool {
return lvl >= zapcore.WarnLevel
}),
)
// Errors to file
errorFile, _ := os.OpenFile("errors.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
errorCore := zapcore.NewCore(
zapcore.NewJSONEncoder(highPriorityConfig),
zapcore.AddSync(errorFile),
zapcore.ErrorLevel,
)
// Combine all cores
tee := zapcore.NewTee(lowPriorityCore, highPriorityCore, errorCore)
return zap.New(tee)
}

// Custom core that filters logs by message pattern
type filterCore struct {
zapcore.Core
pattern *regexp.Regexp
}
func NewFilterCore(core zapcore.Core, pattern string) zapcore.Core {
return &filterCore{
Core: core,
pattern: regexp.MustCompile(pattern),
}
}
func (c *filterCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
// Skip logs that match the filter pattern
if c.pattern.MatchString(ent.Message) {
return ce
}
return c.Core.Check(ent, ce)
}
// Usage
func createFilteredLogger() *zap.Logger {
baseCore := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.InfoLevel,
)
// Filter out logs containing "debug"
filteredCore := NewFilterCore(baseCore, `(?i)debug`)
return zap.New(filteredCore)
}import "net/http"
// WriteSyncer that sends logs to HTTP endpoint
type httpSyncer struct {
url string
client *http.Client
}
func NewHTTPSyncer(url string) zapcore.WriteSyncer {
return &httpSyncer{
url: url,
client: &http.Client{Timeout: 10 * time.Second},
}
}
func (s *httpSyncer) Write(p []byte) (n int, err error) {
resp, err := s.client.Post(s.url, "application/json", bytes.NewReader(p))
if err != nil {
return 0, err
}
defer resp.Body.Close()
if resp.StatusCode >= 400 {
return 0, fmt.Errorf("HTTP error: %d", resp.StatusCode)
}
return len(p), nil
}
func (s *httpSyncer) Sync() error {
// HTTP writes are synchronous, no buffering to flush
return nil
}
// Usage
func createHTTPLogger() *zap.Logger {
core := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
NewHTTPSyncer("https://logs.example.com/ingest"),
zapcore.InfoLevel,
)
return zap.New(core)
}

// Route logs to different cores based on logger name
type routingCore struct {
defaultCore zapcore.Core
routes map[string]zapcore.Core
}
func NewRoutingCore(defaultCore zapcore.Core, routes map[string]zapcore.Core) zapcore.Core {
return &routingCore{
defaultCore: defaultCore,
routes: routes,
}
}
func (c *routingCore) getCore(loggerName string) zapcore.Core {
for prefix, core := range c.routes {
if strings.HasPrefix(loggerName, prefix) {
return core
}
}
return c.defaultCore
}
func (c *routingCore) Enabled(lvl zapcore.Level) bool {
return c.defaultCore.Enabled(lvl)
}
func (c *routingCore) With(fields []zapcore.Field) zapcore.Core {
return &routingCore{
defaultCore: c.defaultCore.With(fields),
routes: c.routes,
}
}
func (c *routingCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
return c.getCore(ent.LoggerName).Check(ent, ce)
}
func (c *routingCore) Write(ent zapcore.Entry, fields []zapcore.Field) error {
return c.getCore(ent.LoggerName).Write(ent, fields)
}
func (c *routingCore) Sync() error {
if err := c.defaultCore.Sync(); err != nil {
return err
}
for _, core := range c.routes {
if err := core.Sync(); err != nil {
return err
}
}
return nil
}
// Usage
func createRoutingLogger() *zap.Logger {
defaultCore := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.InfoLevel,
)
// Database logs go to separate file
dbFile, _ := os.OpenFile("database.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
dbCore := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(dbFile),
zapcore.DebugLevel,
)
// HTTP logs go to another file
httpFile, _ := os.OpenFile("http.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
httpCore := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(httpFile),
zapcore.InfoLevel,
)
routingCore := NewRoutingCore(defaultCore, map[string]zapcore.Core{
"database": dbCore,
"http": httpCore,
})
return zap.New(routingCore)
}

The zapcore package is designed for minimal allocations:
// Fields whose values never change can be built once and reused across calls
var componentField = zap.String("component", "api")

func logRequest(userID int, requestID string) {
    logger.Info("request processed",
        componentField,
        zap.Int("user_id", userID),
        zap.String("request_id", requestID),
    )
}

For high-throughput scenarios, use buffered write syncers:
func createHighThroughputLogger() (*zap.Logger, func()) {
file, _ := os.OpenFile("app.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
bufferedSyncer := &zapcore.BufferedWriteSyncer{
    WS:            zapcore.AddSync(file),
    Size:          512 * 1024,      // 512KB buffer
    FlushInterval: 5 * time.Second, // Flush at least every 5 seconds
}
core := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
bufferedSyncer,
zapcore.InfoLevel,
)
logger := zap.New(core)
// Return cleanup function
cleanup := func() {
    logger.Sync()
    bufferedSyncer.Stop() // Flushes any remaining buffered data
}
return logger, cleanup
}

Use sampling to reduce log volume in hot paths:
func createSampledLogger() *zap.Logger {
core := zapcore.NewCore(
zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
zapcore.AddSync(os.Stdout),
zapcore.InfoLevel,
)
// In each 1-second interval, for each distinct message:
// - Log the first 100 entries
// - Then log every 1000th entry
sampledCore := zapcore.NewSampler(core, time.Second, 100, 1000)
return zap.New(sampledCore)
}

import (
"testing"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
"go.uber.org/zap/zaptest/observer"
)
func TestLogging(t *testing.T) {
// Create observing core for tests
core, recorded := observer.New(zapcore.InfoLevel)
logger := zap.New(core)
// Execute code that logs
logger.Info("test message", zap.String("key", "value"))
// Assert on recorded logs
logs := recorded.All()
if len(logs) != 1 {
t.Errorf("expected 1 log entry, got %d", len(logs))
}
entry := logs[0]
if entry.Message != "test message" {
t.Errorf("expected message 'test message', got '%s'", entry.Message)
}
if entry.ContextMap()["key"] != "value" {
t.Errorf("expected field key=value")
}
}

Use NewTee with level-filtered cores to route different log levels to different destinations.
Use NewSampler or NewSamplerWithOptions to reduce log volume while preserving visibility.
Implement the Encoder interface to create custom output formats beyond JSON and console.
Use AtomicLevel (from the main zap package) with NewCore to change logging levels at runtime, as sketched at the end of this section.
Create custom Core implementations that filter based on entry fields, message content, or other criteria.
Implement WriteSyncer to send logs to external services like Elasticsearch, Kafka, or cloud logging services.
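As referenced above, a minimal sketch of changing the level at runtime with AtomicLevel; NewAtomicLevelAt and SetLevel come from the main zap package, and the function name is illustrative:

func createRuntimeAdjustableLogger() (*zap.Logger, zap.AtomicLevel) {
    // AtomicLevel implements zapcore.LevelEnabler and can be changed at any time
    atom := zap.NewAtomicLevelAt(zapcore.InfoLevel)
    core := zapcore.NewCore(
        zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
        zapcore.AddSync(os.Stdout),
        atom,
    )
    return zap.New(core), atom
}

// Usage: debug logs are dropped until the level is lowered at runtime
// logger, atom := createRuntimeAdjustableLogger()
// logger.Debug("dropped: level is Info")
// atom.SetLevel(zapcore.DebugLevel)
// logger.Debug("written: level is now Debug")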