JSON logging produces machine-readable logs, which are essential for log aggregation systems such as the ELK Stack, Splunk, or cloud logging services. With the logstash-logback-encoder library, Logback can emit your application logs as structured JSON.
Dependencies and Setup
Maven Dependencies
<properties>
<logback.version>1.4.11</logback.version>
<logstash-logback-encoder.version>7.4</logstash-logback-encoder.version>
<jackson.version>2.15.2</jackson.version>
</properties>
<dependencies>
<!-- Logback Core -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<!-- Logstash Logback Encoder -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>${logstash-logback-encoder.version}</version>
</dependency>
<!-- Jackson for custom JSON serialization -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- Java 8 date/time support (JavaTimeModule), used by the custom layouts below -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${jackson.version}</version>
</dependency>
</dependencies>
Basic Logback XML Configuration
1. Simple JSON Configuration
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- Optional: Spring Boot's Logback defaults; remove this include outside Spring Boot -->
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<!-- Console Appender with JSON Layout -->
<appender name="JSON_CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp/>
<logLevel/>
<loggerName/>
<message/>
<threadName/>
<context/>
<mdc/>
<stackTrace/>
</providers>
</encoder>
</appender>
<!-- File Appender with JSON -->
<appender name="JSON_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>logs/application.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>logs/application.%d{yyyy-MM-dd}.json</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>
<timeZone>UTC</timeZone>
</timestamp>
<logLevel/>
<loggerName/>
<message/>
<threadName/>
<mdc/>
<stackTrace/>
</providers>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="JSON_CONSOLE"/>
<appender-ref ref="JSON_FILE"/>
</root>
</configuration>
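Before moving on, you can sanity-check the configuration with a minimal class like the one below (a sketch, not part of the original setup; the JsonLoggingDemo name is illustrative). It assumes the XML above is saved as src/main/resources/logback.xml and that the Spring Boot include is removed when running outside Spring Boot.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class JsonLoggingDemo {
    public static void main(String[] args) {
        Logger log = LoggerFactory.getLogger(JsonLoggingDemo.class);
        MDC.put("traceId", "demo-trace-1");       // surfaces under the mdc provider
        log.info("Hello, JSON logging");          // printed as one JSON object per line
        log.error("Something failed", new IllegalStateException("boom")); // adds a stack_trace field
        MDC.clear();
    }
}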
2. Advanced JSON Configuration with Custom Fields
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- Custom Properties -->
<property name="APPLICATION_NAME" value="user-service"/>
<property name="APPLICATION_VERSION" value="1.0.0"/>
<property name="ENVIRONMENT" value="${ENV:-development}"/>
<!-- JSON Console Appender -->
<appender name="JSON_CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<customFields>{"service":"${APPLICATION_NAME}","version":"${APPLICATION_VERSION}","environment":"${ENVIRONMENT}"}</customFields>
<includeContext>true</includeContext>
<includeMdc>true</includeMdc>
<fieldNames>
<timestamp>timestamp</timestamp>
<message>message</message>
<logger>logger</logger>
<level>level</level>
<thread>thread</thread>
<stackTrace>stack_trace</stackTrace>
<mdc>mdc</mdc>
</fieldNames>
</encoder>
</appender>
<!-- Async Appender for Better Performance -->
<appender name="ASYNC_JSON" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="JSON_CONSOLE"/>
<queueSize>1000</queueSize>
<discardingThreshold>0</discardingThreshold>
<includeCallerData>false</includeCallerData>
<maxFlushTime>5000</maxFlushTime>
</appender>
<root level="INFO">
<appender-ref ref="ASYNC_JSON"/>
</root>
<!-- Specific logger configurations -->
<logger name="com.example.service" level="DEBUG" additivity="false">
<appender-ref ref="ASYNC_JSON"/>
</logger>
</configuration>
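AsyncAppender hands events to a background thread through an in-memory queue, so events still queued when the JVM exits can be lost. Logback's <shutdownHook/> configuration element handles this declaratively; the sketch below does the same thing programmatically (the LoggingShutdown class name is illustrative, the Logback calls are real).
import ch.qos.logback.classic.LoggerContext;
import org.slf4j.LoggerFactory;

public final class LoggingShutdown {
    public static void install() {
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            // Stopping the context stops all appenders, draining the async queue
            // (bounded by the maxFlushTime configured above).
            LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
            context.stop();
        }, "logback-shutdown"));
    }
}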
Custom JSON Encoder Implementations
1. Custom JSON Layout
// Builds each JSON line by hand with Jackson, so it extends Logback's LayoutBase rather than a Logstash class
public class CustomJsonLayout extends LayoutBase<ILoggingEvent> {
private final ObjectMapper objectMapper;
private final String applicationName;
public CustomJsonLayout() {
this.objectMapper = new ObjectMapper();
this.applicationName = System.getProperty("application.name", "unknown");
configureObjectMapper();
}
private void configureObjectMapper() {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
objectMapper.registerModule(new JavaTimeModule());
}
@Override
public String doLayout(ILoggingEvent event) {
try {
LogEntry logEntry = new LogEntry(event, applicationName);
return objectMapper.writeValueAsString(logEntry) + "\n";
} catch (JsonProcessingException e) {
return fallbackLayout(event);
}
}
private String fallbackLayout(ILoggingEvent event) {
return String.format("{\"timestamp\":%d,\"level\":\"%s\",\"message\":\"%s\"}\n",
event.getTimeStamp(),
event.getLevel().toString(),
event.getFormattedMessage().replace("\"", "\\\""));
}
}
// Custom Log Entry Model
class LogEntry {
private final long timestamp;
private final String level;
private final String logger;
private final String message;
private final String thread;
private final String application;
private final Map<String, String> mdc;
private final String stackTrace;
public LogEntry(ILoggingEvent event, String applicationName) {
this.timestamp = event.getTimeStamp();
this.level = event.getLevel().toString();
this.logger = event.getLoggerName();
this.message = event.getFormattedMessage();
this.thread = event.getThreadName();
this.application = applicationName;
this.mdc = copyMdc(event);
this.stackTrace = getStackTrace(event);
}
private Map<String, String> copyMdc(ILoggingEvent event) {
Map<String, String> mdcCopy = new HashMap<>();
if (event.getMDCPropertyMap() != null) {
mdcCopy.putAll(event.getMDCPropertyMap());
}
return mdcCopy.isEmpty() ? null : mdcCopy;
}
private String getStackTrace(ILoggingEvent event) {
if (event.getThrowableProxy() != null) {
return Arrays.stream(event.getThrowableProxy().getStackTraceElementProxyArray())
.map(StackTraceElementProxy::getStackTraceElement)
.map(StackTraceElement::toString)
.collect(Collectors.joining("\n"));
}
return null;
}
// Getters
public long getTimestamp() { return timestamp; }
public String getLevel() { return level; }
public String getLogger() { return logger; }
public String getMessage() { return message; }
public String getThread() { return thread; }
public String getApplication() { return application; }
public Map<String, String> getMdc() { return mdc; }
public String getStackTrace() { return stackTrace; }
}
2. Custom JSON Encoder with Business Context
public class BusinessContextJsonEncoder extends LoggingEventCompositeJsonEncoder {
public BusinessContextJsonEncoder() {
// Register the providers programmatically (the XML <providers> element is the declarative equivalent).
// The composite encoder manages its own JSON generator, so no separate ObjectMapper is needed here.
setProviders(createCustomProviders());
}
private LoggingEventJsonProviders createCustomProviders() {
LoggingEventJsonProviders providers = new LoggingEventJsonProviders();
// Timestamp
LoggingEventFormattedTimestampJsonProvider timestampProvider = new LoggingEventFormattedTimestampJsonProvider();
timestampProvider.setTimeZone("UTC");
providers.addProvider(timestampProvider);
// Log level
providers.addProvider(new LogLevelJsonProvider());
// Logger name
LoggerNameJsonProvider loggerNameProvider = new LoggerNameJsonProvider();
loggerNameProvider.setShortenedLoggerNameLength(20);
providers.addProvider(loggerNameProvider);
// Message
providers.addProvider(new MessageJsonProvider());
// Thread name
providers.addProvider(new ThreadNameJsonProvider());
// MDC
providers.addProvider(new MdcJsonProvider());
// Stack trace
StackTraceJsonProvider stackTraceProvider = new StackTraceJsonProvider();
stackTraceProvider.setThrowableConverter(new ShortenedThrowableConverter());
providers.addProvider(stackTraceProvider);
// Custom business context provider
providers.addProvider(new BusinessContextJsonProvider());
return providers;
}
}
// Custom Business Context Provider
class BusinessContextJsonProvider extends AbstractJsonProvider<ILoggingEvent> {
@Override
public void writeTo(JsonGenerator generator, ILoggingEvent event) throws IOException {
// Note: the ThreadLocal is read at encode time; with async appenders capture it in prepareForDeferredProcessing
BusinessContext context = BusinessContextHolder.getCurrentContext();
if (context != null) {
generator.writeObjectField("business_context", context);
}
// Additional business fields come from the event's MDC snapshot; null values are skipped
// because writeStringField does not accept null
Map<String, String> mdc = event.getMDCPropertyMap();
writeIfPresent(generator, "correlation_id", mdc.get("correlationId"));
writeIfPresent(generator, "user_id", mdc.get("userId"));
writeIfPresent(generator, "session_id", mdc.get("sessionId"));
}
private void writeIfPresent(JsonGenerator generator, String field, String value) throws IOException {
if (value != null) {
generator.writeStringField(field, value);
}
}
@Override
public void prepareForDeferredProcessing(ILoggingEvent event) {
// No preparation needed for the MDC-based fields; the snapshot is already captured on the event
}
}
// Business Context Model
class BusinessContext {
private final String tenantId;
private final String operation;
private final String resource;
private final Map<String, Object> additionalInfo;
public BusinessContext(String tenantId, String operation, String resource) {
this.tenantId = tenantId;
this.operation = operation;
this.resource = resource;
this.additionalInfo = new HashMap<>();
}
public void addInfo(String key, Object value) {
additionalInfo.put(key, value);
}
// Getters
public String getTenantId() { return tenantId; }
public String getOperation() { return operation; }
public String getResource() { return resource; }
public Map<String, Object> getAdditionalInfo() { return additionalInfo; }
}
// Business Context Holder
public class BusinessContextHolder {
private static final ThreadLocal<BusinessContext> currentContext = new ThreadLocal<>();
public static void setCurrentContext(BusinessContext context) {
currentContext.set(context);
}
public static BusinessContext getCurrentContext() {
return currentContext.get();
}
public static void clear() {
currentContext.remove();
}
}
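Because the holder is backed by a ThreadLocal, a forgotten clear() leaks context across requests on pooled threads. A small convenience wrapper (hypothetical, not one of the classes above) pairs set and clear automatically via try-with-resources:
public final class BusinessContextScope implements AutoCloseable {
    public BusinessContextScope(BusinessContext context) {
        BusinessContextHolder.setCurrentContext(context);
    }
    @Override
    public void close() {
        BusinessContextHolder.clear(); // always runs, even if the guarded block throws
    }
}

// Usage:
// try (BusinessContextScope scope = new BusinessContextScope(
//         new BusinessContext("tenant-123", "process_order", "orders"))) {
//     logger.info("Processing order");
// } // context cleared here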
Advanced Configuration Examples
1. Environment-Specific Configurations
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- Environment Detection -->
<springProfile name="development">
<property name="LOG_LEVEL" value="DEBUG"/>
<property name="LOG_PATTERN" value="console"/>
</springProfile>
<springProfile name="staging">
<property name="LOG_LEVEL" value="INFO"/>
<property name="LOG_PATTERN" value="json"/>
</springProfile>
<springProfile name="production">
<property name="LOG_LEVEL" value="WARN"/>
<property name="LOG_PATTERN" value="json"/>
</springProfile>
<!-- Development Console Appender (Pretty Print) -->
<springProfile name="development">
<appender name="PRETTY_JSON" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp/>
<logLevel/>
<loggerName/>
<message/>
<mdc/>
<stackTrace/>
</providers>
<jsonGeneratorDecorator class="net.logstash.logback.decorate.PrettyPrintingJsonGeneratorDecorator"/>
</encoder>
</appender>
</springProfile>
<!-- Production JSON Appender -->
<springProfile name="production">
<appender name="PRODUCTION_JSON" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>/var/log/myapp/application.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>/var/log/myapp/application.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>100MB</maxFileSize>
<maxHistory>30</maxHistory>
<totalSizeCap>3GB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="com.example.CustomJsonLayout"/>
</encoder>
</appender>
<!-- Logstash TCP Appender for ELK -->
<appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>logstash.example.com:5000</destination>
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<customFields>{"app":"myapp","env":"production"}</customFields>
</encoder>
<ssl/>
</appender>
</springProfile>
<root level="${LOG_LEVEL}">
<springProfile name="development">
<appender-ref ref="PRETTY_JSON"/>
</springProfile>
<springProfile name="production">
<appender-ref ref="PRODUCTION_JSON"/>
<appender-ref ref="LOGSTASH"/>
</springProfile>
</root>
</configuration>
2. Conditional Logging with JSON
<configuration>
<!-- Filter for sensitive data -->
<appender name="JSON_CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<filter class="com.example.SensitiveDataFilter"/>
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<customFields>{"service":"user-service"}</customFields>
<!-- Ship only whitelisted MDC keys. includeMdcKeyName and excludeMdcKeyName cannot be combined; leaving password and token off the whitelist keeps them out of the output -->
<includeMdcKeyName>traceId</includeMdcKeyName>
<includeMdcKeyName>spanId</includeMdcKeyName>
<includeMdcKeyName>userId</includeMdcKeyName>
</encoder>
</appender>
</configuration>
// Custom Filter for Sensitive Data
public class SensitiveDataFilter extends Filter<ILoggingEvent> {
private final List<Pattern> sensitivePatterns = Arrays.asList(
Pattern.compile("(?i)password.*=.*"),
Pattern.compile("(?i)token.*=.*"),
Pattern.compile("(?i)secret.*=.*"),
Pattern.compile("\\b\\d{16}\\b") // Credit card numbers
);
@Override
public FilterReply decide(ILoggingEvent event) {
String message = event.getFormattedMessage();
for (Pattern pattern : sensitivePatterns) {
if (pattern.matcher(message).find()) {
return FilterReply.DENY;
}
}
return FilterReply.NEUTRAL;
}
}
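A quick way to exercise the filter before wiring it into an appender is to feed it hand-built events. The harness below is a hedged sketch: only the Logback classes are real APIs, the test class itself is illustrative.
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.LoggingEvent;
import org.slf4j.LoggerFactory;

public class SensitiveDataFilterCheck {
    public static void main(String[] args) {
        Logger logger = (Logger) LoggerFactory.getLogger("filter-check");
        SensitiveDataFilter filter = new SensitiveDataFilter();
        LoggingEvent leaky = new LoggingEvent(
                Logger.FQCN, logger, Level.INFO, "password=hunter2", null, null);
        LoggingEvent safe = new LoggingEvent(
                Logger.FQCN, logger, Level.INFO, "user logged in", null, null);
        System.out.println(filter.decide(leaky)); // DENY
        System.out.println(filter.decide(safe));  // NEUTRAL
    }
}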
Java Usage Examples
1. Basic Logging with MDC
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
@Service
public class UserService {
private static final Logger logger = LoggerFactory.getLogger(UserService.class);
public User createUser(CreateUserRequest request) {
// Set MDC values for log correlation
MDC.put("userId", request.getId());
MDC.put("operation", "create_user");
MDC.put("traceId", UUID.randomUUID().toString());
try {
logger.info("Creating user with email: {}", request.getEmail());
// Validate user
if (!isValidEmail(request.getEmail())) {
logger.warn("Invalid email format: {}", request.getEmail());
throw new IllegalArgumentException("Invalid email format");
}
User user = userRepository.save(request.toUser());
logger.info("User created successfully: {}", user.getId());
return user;
} catch (Exception e) {
logger.error("Failed to create user: {}", e.getMessage(), e);
throw e;
} finally {
// Clear MDC to prevent memory leaks
MDC.clear();
}
}
@Traced
public User findUserById(String userId) {
MDC.put("userId", userId);
logger.debug("Searching for user: {}", userId);
try {
User user = userRepository.findById(userId)
.orElseThrow(() -> {
logger.warn("User not found: {}", userId);
return new UserNotFoundException("User not found");
});
logger.debug("User found: {}", user.getEmail());
return user;
} finally {
MDC.remove("userId");
}
}
}
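If you are on SLF4J 1.7.14 or later, MDC.putCloseable is a try-with-resources alternative to the manual remove/clear calls above; the demo class below is a self-contained sketch of the pattern.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcScopedLoggingDemo {
    private static final Logger logger = LoggerFactory.getLogger(MdcScopedLoggingDemo.class);

    public static void main(String[] args) {
        // The key is removed automatically when the try block exits, even on exceptions.
        try (MDC.MDCCloseable ignored = MDC.putCloseable("userId", "user-789")) {
            logger.info("Searching for user");
        }
        logger.info("userId is no longer in the MDC here");
    }
}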
2. Structured Logging with Custom Fields
import static net.logstash.logback.argument.StructuredArguments.keyValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
@Component
public class OrderService {
private static final Logger logger = LoggerFactory.getLogger(OrderService.class);
public void processOrder(Order order) {
// Create business context
BusinessContext context = new BusinessContext(
order.getTenantId(),
"process_order",
"orders"
);
context.addInfo("order_amount", order.getAmount());
context.addInfo("currency", order.getCurrency());
BusinessContextHolder.setCurrentContext(context);
try {
MDC.put("orderId", order.getId());
MDC.put("customerId", order.getCustomerId());
logger.info("Processing order",
keyValue("order_id", order.getId()),
keyValue("customer_id", order.getCustomerId()),
keyValue("amount", order.getAmount()),
keyValue("items_count", order.getItems().size())
);
// Process payment
processPayment(order);
// Update inventory
updateInventory(order);
logger.info("Order processed successfully");
} catch (Exception e) {
logger.error("Order processing failed",
keyValue("error_type", e.getClass().getSimpleName()),
keyValue("error_message", e.getMessage())
);
throw e;
} finally {
BusinessContextHolder.clear();
MDC.clear();
}
}
// Helper method for structured logging
private void logOrderEvent(String event, Order order, Map<String, Object> additionalFields) {
logger.info("Order event: {}", event,
keyValue("order_id", order.getId()),
keyValue("event", event),
keyValue("timestamp", Instant.now()),
keyValue("additional_data", additionalFields)
);
}
// Using Marker for additional context
public void cancelOrder(Order order, String reason) {
Marker cancellationMarker = MarkerFactory.getMarker("CANCELLATION");
logger.info(cancellationMarker, "Order cancelled",
keyValue("order_id", order.getId()),
keyValue("reason", reason),
keyValue("cancelled_by", getCurrentUser())
);
}
}
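keyValue is only one of the StructuredArguments helpers shipped with logstash-logback-encoder. The short sketch below shows the common variants; the field names and values are illustrative.
import static net.logstash.logback.argument.StructuredArguments.entries;
import static net.logstash.logback.argument.StructuredArguments.keyValue;
import static net.logstash.logback.argument.StructuredArguments.value;

import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class StructuredArgumentsDemo {
    private static final Logger logger = LoggerFactory.getLogger(StructuredArgumentsDemo.class);

    public static void main(String[] args) {
        // keyValue: JSON field plus "key=value" in the formatted message
        logger.info("Order accepted {}", keyValue("order_id", "order-456"));
        // value: JSON field, but only the value appears in the formatted message
        logger.info("Order accepted for {}", value("customer_id", "cust-42"));
        // entries: every map entry becomes its own top-level JSON field
        logger.info("Order totals {}", entries(Map.of("amount", 99.99, "currency", "USD")));
    }
}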
3. JSON Logging Utility Class
// Emits a fully formed JSON string as the log message. Use it with plain-text appenders;
// behind a JSON encoder the message field would contain escaped, double-encoded JSON.
public class JsonLogger {
private static final ObjectMapper MAPPER = new ObjectMapper();
private final Logger logger;
private final Map<String, Object> baseFields;
public JsonLogger(Class<?> clazz) {
this.logger = LoggerFactory.getLogger(clazz);
this.baseFields = new HashMap<>();
initializeBaseFields();
}
private void initializeBaseFields() {
baseFields.put("service", "user-service");
baseFields.put("version", "1.0.0");
baseFields.put("environment", System.getenv("ENV"));
}
public void info(String message, Map<String, Object> fields) {
Map<String, Object> logEntry = createLogEntry("INFO", message, fields);
logger.info(createLogMessage(logEntry));
}
public void error(String message, Exception e, Map<String, Object> fields) {
Map<String, Object> logEntry = createLogEntry("ERROR", message, fields);
logEntry.put("stack_trace", getStackTrace(e));
logger.error(createLogMessage(logEntry));
}
public void debug(String message, Map<String, Object> fields) {
if (logger.isDebugEnabled()) {
Map<String, Object> logEntry = createLogEntry("DEBUG", message, fields);
logger.debug(createLogMessage(logEntry));
}
}
private Map<String, Object> createLogEntry(String level, String message, Map<String, Object> fields) {
Map<String, Object> entry = new HashMap<>(baseFields);
entry.put("timestamp", Instant.now().toString());
entry.put("level", level);
entry.put("message", message);
entry.put("thread", Thread.currentThread().getName());
if (fields != null) {
entry.putAll(fields);
}
// Add MDC context
Map<String, String> mdc = MDC.getCopyOfContextMap();
if (mdc != null) {
entry.put("mdc", mdc);
}
return entry;
}
private String createLogMessage(Map<String, Object> logEntry) {
try {
return MAPPER.writeValueAsString(logEntry); // reuse the shared, thread-safe ObjectMapper
} catch (JsonProcessingException e) {
return "{\"error\":\"Failed to serialize log entry\"}";
}
}
private String getStackTrace(Exception e) {
StringWriter sw = new StringWriter();
e.printStackTrace(new PrintWriter(sw));
return sw.toString();
}
}
// Usage example
@Service
public class PaymentService {
private final JsonLogger logger = new JsonLogger(PaymentService.class);
public void processPayment(PaymentRequest request) {
Map<String, Object> fields = new HashMap<>();
fields.put("payment_id", request.getId());
fields.put("amount", request.getAmount());
fields.put("currency", request.getCurrency());
logger.info("Processing payment", fields);
try {
// Process payment logic
logger.info("Payment processed successfully", fields);
} catch (Exception e) {
fields.put("error", e.getMessage());
logger.error("Payment processing failed", e, fields);
throw e;
}
}
}
Sample JSON Output
1. Basic Info Log
{
"timestamp": "2023-10-15T10:30:45.123Z",
"level": "INFO",
"logger": "com.example.UserService",
"message": "User created successfully",
"thread": "http-nio-8080-exec-1",
"service": "user-service",
"mdc": {
"traceId": "abc123def456",
"userId": "user-789",
"operation": "create_user"
}
}
2. Error Log with Stack Trace
{
"timestamp": "2023-10-15T10:31:22.456Z",
"level": "ERROR",
"logger": "com.example.OrderService",
"message": "Payment processing failed",
"thread": "http-nio-8080-exec-2",
"service": "order-service",
"mdc": {
"traceId": "xyz789uvw012",
"orderId": "order-456"
},
"stack_trace": "com.example.PaymentException: Insufficient funds\n\tat com.example.PaymentService.process(PaymentService.java:45)",
"business_context": {
"tenantId": "tenant-123",
"operation": "process_payment",
"resource": "payments",
"additionalInfo": {
"amount": 99.99,
"currency": "USD"
}
}
}
3. Debug Log with Custom Fields
{
"timestamp": "2023-10-15T10:32:15.789Z",
"level": "DEBUG",
"logger": "com.example.InventoryService",
"message": "Inventory check completed",
"thread": "task-scheduler-1",
"service": "inventory-service",
"duration_ms": 45,
"items_checked": 15,
"in_stock": 12,
"out_of_stock": 3
}
Best Practices
- Include Context: Always include correlation IDs, user context, and business identifiers
- Avoid Sensitive Data: Never log passwords, tokens, or personal identifiable information
- Use Consistent Field Names: Standardize field names across services
- Set Appropriate Log Levels: Use DEBUG for development, INFO for production
- Monitor Log Volume: Be mindful of log size and implement log rotation
- Test Logging Configuration: Verify the JSON format in each environment (see the test sketch after the example below)
// Good practice - structured logging with context
logger.info("User login successful",
keyValue("user_id", userId),
keyValue("ip_address", ipAddress),
keyValue("user_agent", userAgent),
keyValue("login_method", "password")
);
// Bad practice - unstructured logging
logger.info("User " + userId + " logged in from " + ipAddress);
Conclusion
JSON logging with Logback and the Logstash encoder provides:
- Structured logging for better log analysis
- Machine-readable format for log aggregation systems
- Rich context with MDC and custom fields
- Performance benefits with async appenders
- Environment-specific configurations
By implementing JSON logging with proper context and structure, you significantly improve your application's observability and make troubleshooting much more efficient in distributed systems.