Introduction to Distributed Context
Distributed context propagation enables maintaining request-scoped information across service boundaries in microservices architectures. It's essential for tracing, logging, security, and business logic that spans multiple services.
Core Context Models
Context Interface and Implementation
/**
 * Request-scoped context carried across service boundaries.
 * Combines fixed tracing identity (trace/span ids, version) with mutable
 * key/value "baggage", plus map and HTTP-header serialization for propagation.
 */
public interface DistributedContext {
// Identifier shared by every span in one distributed request.
String getTraceId();
// Identifier of the current unit of work within the trace.
String getSpanId();
// Span id of the caller, or null for a root span.
String getParentSpanId();
// Context schema version, used when propagation formats evolve.
String getVersion();
// Returns the baggage value for key, or null if absent. Unchecked cast — caller asserts T.
<T> T get(String key);
// Stores a baggage value under key.
<T> void put(String key, T value);
// True if a baggage entry exists for key.
boolean containsKey(String key);
// Removes the baggage entry for key, if any.
void remove(String key);
// Snapshot of baggage PLUS the reserved keys traceId/spanId/parentSpanId/version.
Map<String, Object> toMap();
// Replaces baggage from a map produced by toMap(); reserved keys are skipped
// (implementations keep trace identity immutable).
void fromMap(Map<String, Object> contextMap);
// Headers for propagation
// Serializes identity and baggage to HTTP headers (X-Trace-Id, X-Baggage-*, ...).
Map<String, String> toHeaders();
// Replaces baggage from X-Baggage-* headers; trace identity headers are ignored here.
void fromHeaders(Map<String, String> headers);
}
/**
 * Default {@link DistributedContext}: immutable trace identity
 * (traceId/spanId/parentSpanId/version) plus a thread-safe, mutable
 * baggage map. Build instances via {@link Builder}.
 */
public class DefaultDistributedContext implements DistributedContext {
    private final String traceId;
    private final String spanId;
    private final String parentSpanId;
    private final String version;
    // ConcurrentHashMap: baggage may be read/written from multiple threads.
    // Note: it rejects null keys and values — put() compensates below.
    private final Map<String, Object> baggage;

    private DefaultDistributedContext(Builder builder) {
        this.traceId = builder.traceId;
        this.spanId = builder.spanId;
        this.parentSpanId = builder.parentSpanId;
        this.version = builder.version;
        this.baggage = new ConcurrentHashMap<>(builder.baggage);
    }

    @Override
    public String getTraceId() { return traceId; }

    @Override
    public String getSpanId() { return spanId; }

    @Override
    public String getParentSpanId() { return parentSpanId; }

    @Override
    public String getVersion() { return version; }

    /** Returns the baggage value for {@code key}, or null. Caller asserts the type. */
    @Override
    @SuppressWarnings("unchecked")
    public <T> T get(String key) {
        return (T) baggage.get(key);
    }

    /**
     * Stores a baggage entry. FIX: the previous version passed null values
     * straight to ConcurrentHashMap.put, which throws NullPointerException;
     * a null value now removes the entry instead, so callers can "unset" keys.
     */
    @Override
    public <T> void put(String key, T value) {
        if (value == null) {
            baggage.remove(key);
        } else {
            baggage.put(key, value);
        }
    }

    @Override
    public boolean containsKey(String key) {
        return baggage.containsKey(key);
    }

    @Override
    public void remove(String key) {
        baggage.remove(key);
    }

    /** Baggage snapshot plus the four reserved identity keys. */
    @Override
    public Map<String, Object> toMap() {
        Map<String, Object> map = new HashMap<>(baggage);
        map.put("traceId", traceId);
        map.put("spanId", spanId);
        map.put("parentSpanId", parentSpanId);
        map.put("version", version);
        return map;
    }

    /**
     * Replaces baggage from a {@link #toMap()}-style map. Reserved identity
     * keys are skipped — trace identity is final and cannot be rewritten here.
     */
    @Override
    public void fromMap(Map<String, Object> contextMap) {
        baggage.clear();
        contextMap.forEach((key, value) -> {
            if (!isReservedKey(key)) {
                baggage.put(key, value);
            }
        });
    }

    /** Serializes identity and baggage to propagation headers. */
    @Override
    public Map<String, String> toHeaders() {
        Map<String, String> headers = new HashMap<>();
        headers.put("X-Trace-Id", traceId);
        headers.put("X-Span-Id", spanId);
        // Empty string stands in for "no parent" so the header is always present.
        headers.put("X-Parent-Span-Id", parentSpanId != null ? parentSpanId : "");
        headers.put("X-Context-Version", version);
        // Serialize baggage. (Values are never null here — put() filters them.)
        // FIX: the old instanceof-String branch was redundant; String.valueOf
        // covers both cases identically.
        baggage.forEach((key, value) ->
            headers.put("X-Baggage-" + key, String.valueOf(value)));
        return headers;
    }

    /**
     * Replaces baggage from X-Baggage-* headers. Identity headers
     * (X-Trace-Id etc.) are ignored — identity is fixed at build time.
     */
    @Override
    public void fromHeaders(Map<String, String> headers) {
        baggage.clear();
        headers.forEach((key, value) -> {
            if (key.startsWith("X-Baggage-")) {
                String baggageKey = key.substring("X-Baggage-".length());
                baggage.put(baggageKey, value);
            }
        });
    }

    // Keys reserved for trace identity; never stored as baggage.
    private static boolean isReservedKey(String key) {
        return "traceId".equals(key) || "spanId".equals(key)
            || "parentSpanId".equals(key) || "version".equals(key);
    }

    /** Fluent builder. Missing traceId/spanId are generated at build time. */
    public static class Builder {
        private String traceId;
        private String spanId;
        private String parentSpanId;
        private String version = "1.0";
        private Map<String, Object> baggage = new HashMap<>();

        public Builder traceId(String traceId) {
            this.traceId = traceId;
            return this;
        }

        public Builder spanId(String spanId) {
            this.spanId = spanId;
            return this;
        }

        public Builder parentSpanId(String parentSpanId) {
            this.parentSpanId = parentSpanId;
            return this;
        }

        public Builder version(String version) {
            this.version = version;
            return this;
        }

        /**
         * Replaces the baggage. FIX: the previous version kept a reference to
         * the caller's map, so later addBaggage() calls mutated it; we now
         * take a defensive copy.
         */
        public Builder baggage(Map<String, Object> baggage) {
            this.baggage = new HashMap<>(baggage);
            return this;
        }

        public Builder addBaggage(String key, Object value) {
            this.baggage.put(key, value);
            return this;
        }

        public DefaultDistributedContext build() {
            if (traceId == null) {
                traceId = generateId();
            }
            if (spanId == null) {
                spanId = generateId();
            }
            return new DefaultDistributedContext(this);
        }

        // 16 hex chars from a random UUID — compact id, not globally standard-compliant.
        private String generateId() {
            return UUID.randomUUID().toString().replace("-", "").substring(0, 16);
        }
    }
}
Thread-Local Context Management
Context Holder with MDC Integration
/**
 * Thread-local owner of the current {@link DistributedContext}, kept in sync
 * with the SLF4J MDC so log lines automatically carry trace/baggage fields.
 * Also provides wrappers that carry the context onto worker threads.
 */
public class DistributedContextHolder {
    private static final ThreadLocal<DistributedContext> HOLDER = new ThreadLocal<>();
    private static final Logger logger = LoggerFactory.getLogger(DistributedContextHolder.class);

    /** Binds {@code context} to the current thread; a null argument clears instead. */
    public static void setContext(DistributedContext context) {
        if (context == null) {
            clearContext();
            return;
        }
        HOLDER.set(context);
        // Sync with SLF4J MDC for logging
        updateMDC(context);
        logger.debug("Context set for thread: {}", Thread.currentThread().getName());
    }

    /**
     * Returns the bound context, lazily creating and binding an empty one if
     * none exists — so this never returns null (and has that side effect).
     */
    public static DistributedContext getContext() {
        DistributedContext existing = HOLDER.get();
        if (existing != null) {
            return existing;
        }
        DistributedContext fresh = createEmptyContext();
        setContext(fresh);
        return fresh;
    }

    /** Returns the bound context or null — no lazy creation. */
    public static DistributedContext getContextOrNull() {
        return HOLDER.get();
    }

    /** Unbinds the context and wipes the MDC for this thread. */
    public static void clearContext() {
        HOLDER.remove();
        clearMDC();
        logger.debug("Context cleared for thread: {}", Thread.currentThread().getName());
    }

    public static boolean hasContext() {
        return HOLDER.get() != null;
    }

    /** Convenience read from the (lazily created) current context. */
    public static <T> T getFromContext(String key) {
        return getContext().get(key);
    }

    /** Convenience write into the current context; re-syncs the MDC afterwards. */
    public static <T> void putInContext(String key, T value) {
        DistributedContext current = getContext();
        current.put(key, value);
        updateMDC(current); // Update MDC after modification
    }

    // Rebuilds the MDC from scratch: trace identity first, then every
    // non-null entry of toMap() (which re-includes the identity keys).
    private static void updateMDC(DistributedContext context) {
        MDC.clear();
        MDC.put("traceId", context.getTraceId());
        MDC.put("spanId", context.getSpanId());
        for (Map.Entry<String, Object> entry : context.toMap().entrySet()) {
            Object value = entry.getValue();
            if (value != null) {
                MDC.put(entry.getKey(), value.toString());
            }
        }
    }

    private static void clearMDC() {
        MDC.clear();
    }

    private static DistributedContext createEmptyContext() {
        return new DefaultDistributedContext.Builder().build();
    }

    // Restores a previously captured binding (or clears if there was none).
    private static void restore(DistributedContext previous) {
        if (previous != null) {
            setContext(previous);
        } else {
            clearContext();
        }
    }

    /** Captures the caller's context and replays it around {@code task} on the executing thread. */
    public static Runnable wrap(Runnable task) {
        DistributedContext captured = getContext();
        return () -> {
            DistributedContext previous = getContextOrNull();
            setContext(captured);
            try {
                task.run();
            } finally {
                restore(previous);
            }
        };
    }

    /** Same as {@link #wrap(Runnable)} for value-returning tasks. */
    public static <T> Callable<T> wrap(Callable<T> task) {
        DistributedContext captured = getContext();
        return () -> {
            DistributedContext previous = getContextOrNull();
            setContext(captured);
            try {
                return task.call();
            } finally {
                restore(previous);
            }
        };
    }
}
Spring Boot Integration
Auto-Configuration and Filters
/**
 * Auto-configuration wiring the context filter, the RestTemplate interceptor,
 * and a context-propagating async executor. All beans back off when the
 * application defines its own.
 */
@Configuration
@EnableConfigurationProperties(DistributedContextProperties.class)
public class DistributedContextAutoConfiguration {

    /** Servlet filter that extracts/propagates context on every HTTP request. */
    @Bean
    @ConditionalOnMissingBean
    public DistributedContextFilter distributedContextFilter(
            DistributedContextProperties properties) {
        return new DistributedContextFilter(properties);
    }

    /** RestTemplate interceptor that forwards the context to downstream calls. */
    @Bean
    @ConditionalOnMissingBean
    public ContextPropagationInterceptor contextPropagationInterceptor() {
        return new ContextPropagationInterceptor();
    }

    /**
     * Async executor whose task decorator copies the caller's context onto
     * worker threads. FIX: previously this bean was unconditional and silently
     * replaced any application-defined AsyncConfigurer; it now backs off.
     */
    @Bean
    @ConditionalOnMissingBean(AsyncConfigurer.class)
    public AsyncConfigurer asyncConfigurer() {
        return new AsyncConfigurer() {
            @Override
            public Executor getAsyncExecutor() {
                ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
                executor.setCorePoolSize(10);
                executor.setMaxPoolSize(50);
                executor.setQueueCapacity(100);
                executor.setThreadNamePrefix("Async-");
                // wrap(Runnable) captures the submitter's context for the worker.
                executor.setTaskDecorator(DistributedContextHolder::wrap);
                executor.initialize();
                return executor;
            }
        };
    }
}
/**
 * Servlet filter that rebuilds the distributed context from incoming request
 * headers, binds it for the duration of the request, optionally echoes it on
 * the response, and always clears the thread-local afterwards.
 */
@Component
public class DistributedContextFilter implements Filter {
    private static final Logger logger = LoggerFactory.getLogger(DistributedContextFilter.class);
    private final DistributedContextProperties properties;

    public DistributedContextFilter(DistributedContextProperties properties) {
        this.properties = properties;
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        // FIX: properties.isEnabled() was never consulted — the filter ran even
        // when distributed.context.enabled=false. Pass through untouched for
        // non-HTTP exchanges or when the feature is disabled.
        if (!properties.isEnabled()
                || !(request instanceof HttpServletRequest)
                || !(response instanceof HttpServletResponse)) {
            chain.doFilter(request, response);
            return;
        }
        HttpServletRequest httpRequest = (HttpServletRequest) request;
        HttpServletResponse httpResponse = (HttpServletResponse) response;
        // Extract context from headers
        DistributedContext context = extractContextFromHeaders(httpRequest);
        try {
            // Set context for current request
            DistributedContextHolder.setContext(context);
            // Add context to response headers for downstream services
            addContextToResponseHeaders(httpResponse, context);
            chain.doFilter(request, response);
        } finally {
            // Mandatory on pooled servlet threads — prevents context leaking
            // into the next request handled by this thread.
            DistributedContextHolder.clearContext();
        }
    }

    /** Builds a context from X-Trace-Id/X-Span-Id/... and X-Baggage-* headers; missing ids are generated. */
    private DistributedContext extractContextFromHeaders(HttpServletRequest request) {
        DefaultDistributedContext.Builder builder = new DefaultDistributedContext.Builder();
        // Extract tracing headers (blank values are treated as absent).
        String traceId = getHeader(request, "X-Trace-Id");
        String spanId = getHeader(request, "X-Span-Id");
        String parentSpanId = getHeader(request, "X-Parent-Span-Id");
        String version = getHeader(request, "X-Context-Version");
        if (traceId != null) builder.traceId(traceId);
        if (spanId != null) builder.spanId(spanId);
        if (parentSpanId != null) builder.parentSpanId(parentSpanId);
        if (version != null) builder.version(version);
        // Extract baggage headers
        Enumeration<String> headerNames = request.getHeaderNames();
        while (headerNames.hasMoreElements()) {
            String headerName = headerNames.nextElement();
            if (headerName.startsWith("X-Baggage-")) {
                String baggageKey = headerName.substring("X-Baggage-".length());
                builder.addBaggage(baggageKey, request.getHeader(headerName));
            }
        }
        return builder.build();
    }

    // Echoes the context on the response when configured to do so.
    private void addContextToResponseHeaders(HttpServletResponse response, DistributedContext context) {
        if (properties.isPropagateResponseHeaders()) {
            context.toHeaders().forEach(response::setHeader);
        }
    }

    // Normalizes blank header values to null.
    private String getHeader(HttpServletRequest request, String headerName) {
        String value = request.getHeader(headerName);
        return (value != null && !value.trim().isEmpty()) ? value : null;
    }
}
/**
 * Configuration knobs bound from the {@code distributed.context.*} prefix.
 * All values have working defaults so the feature is usable out of the box.
 */
@ConfigurationProperties(prefix = "distributed.context")
public class DistributedContextProperties {
    /** Master switch for context extraction/propagation. */
    private boolean enabled = true;
    /** Whether the filter echoes context headers on HTTP responses. */
    private boolean propagateResponseHeaders = true;
    /** Header names (wildcards allowed) considered part of the context. */
    private List<String> propagateHeaders = Arrays.asList(
        "X-Trace-Id", "X-Span-Id", "X-Parent-Span-Id", "X-Context-Version", "X-Baggage-*"
    );
    /** Baggage keys whose values must be redacted before logging/propagation. */
    private List<String> sensitiveFields = Arrays.asList("password", "token", "secret");

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public boolean isPropagateResponseHeaders() {
        return propagateResponseHeaders;
    }

    public void setPropagateResponseHeaders(boolean propagateResponseHeaders) {
        this.propagateResponseHeaders = propagateResponseHeaders;
    }

    public List<String> getPropagateHeaders() {
        return propagateHeaders;
    }

    public void setPropagateHeaders(List<String> propagateHeaders) {
        this.propagateHeaders = propagateHeaders;
    }

    public List<String> getSensitiveFields() {
        return sensitiveFields;
    }

    public void setSensitiveFields(List<String> sensitiveFields) {
        this.sensitiveFields = sensitiveFields;
    }
}
HTTP Client Integration
RestTemplate Interceptor
/**
 * RestTemplate interceptor that copies the current distributed context onto
 * outbound request headers before the call is executed.
 */
@Component
public class ContextPropagationInterceptor implements ClientHttpRequestInterceptor {
    private static final Logger logger = LoggerFactory.getLogger(ContextPropagationInterceptor.class);

    @Override
    public ClientHttpResponse intercept(HttpRequest request, byte[] body,
            ClientHttpRequestExecution execution) throws IOException {
        DistributedContext ctx = DistributedContextHolder.getContext();
        if (ctx != null) {
            // Propagate context via headers (set() overwrites any existing value).
            for (Map.Entry<String, String> header : ctx.toHeaders().entrySet()) {
                request.getHeaders().set(header.getKey(), header.getValue());
            }
            logger.debug("Propagated context to downstream service: {}",
                request.getURI().getHost());
        }
        return execution.execute(request, body);
    }
}
/**
 * Provides a RestTemplate whose interceptor chain includes distributed-context
 * propagation, while preserving any interceptors already registered.
 */
@Configuration
public class RestTemplateConfig {

    @Bean
    public RestTemplate restTemplate(ContextPropagationInterceptor interceptor) {
        RestTemplate template = new RestTemplate();
        // Append to (not replace) the existing interceptor chain.
        List<ClientHttpRequestInterceptor> chain = new ArrayList<>(template.getInterceptors());
        chain.add(interceptor);
        template.setInterceptors(chain);
        return template;
    }
}
WebClient Integration
/**
 * Adds an ExchangeFilterFunction to a WebClient builder that copies the
 * thread-local distributed context onto outbound request headers.
 */
@Component
public class WebClientContextPropagator {
// Registers the propagation filter on the supplied builder.
public WebClient.Builder contextAwareWebClient(WebClient.Builder builder) {
return builder.filter(this::contextPropagationFilter);
}
// Copies context headers onto the outgoing request, then delegates.
// NOTE(review): the ThreadLocal is read inside deferContextual, i.e. at
// subscription time — if subscription happens on a different thread than the
// one that populated DistributedContextHolder (common on reactor schedulers),
// the context may be missing. The contextView parameter is currently unused;
// presumably the Reactor Context (see ReactiveContextPropagator) was meant to
// be the fallback source — confirm intended behavior.
private Mono<ClientResponse> contextPropagationFilter(ClientRequest request,
ExchangeFunction next) {
return Mono.deferContextual(contextView -> {
ClientRequest.Builder requestBuilder = ClientRequest.from(request);
// Propagate context from ThreadLocal
DistributedContext context = DistributedContextHolder.getContext();
if (context != null) {
Map<String, String> headers = context.toHeaders();
headers.forEach(requestBuilder::header);
}
return next.exchange(requestBuilder.build());
});
}
}
// Reactive context propagation
/**
 * Bridges the distributed context between the Reactor Context and the
 * thread-local holder used by non-reactive code.
 */
@Component
public class ReactiveContextPropagator {
// Key under which the context travels in the Reactor Context.
public static final String CONTEXT_KEY = "distributedContext";
// Binds the context from the Reactor Context into the ThreadLocal around the
// given Mono's execution, clearing it on any terminal signal.
// NOTE(review): Mono.fromCallable returning null completes empty, which is
// fine here since .then(mono) discards the value. However, setContext runs on
// the subscribing thread while `mono`'s operators may execute on other
// scheduler threads where the ThreadLocal is unset — confirm that downstream
// code reads the context on the same thread, or use contextWrite instead.
public <T> Mono<T> withContext(Mono<T> mono) {
return Mono.deferContextual(contextView -> {
DistributedContext context = contextView.getOrDefault(CONTEXT_KEY, null);
if (context != null) {
return Mono.fromCallable(() -> {
DistributedContextHolder.setContext(context);
return null;
}).then(mono)
.doFinally(signal -> DistributedContextHolder.clearContext());
}
return mono;
});
}
// Captures the current thread-local context into a Reactor Context, for use
// with contextWrite(ReactiveContextPropagator.propagateContext()).
// NOTE(review): getContext() never returns null (it lazily creates a context),
// so the empty() branch is effectively unreachable — harmless, but worth knowing.
public static Context propagateContext() {
DistributedContext context = DistributedContextHolder.getContext();
if (context != null) {
return Context.of(CONTEXT_KEY, context);
}
return Context.empty();
}
}
Messaging Integration
Kafka Context Propagation
/**
 * Publishes Kafka messages with the current distributed context attached as
 * record headers, so consumers can rebuild the context on their side.
 */
@Component
public class KafkaContextPropagator {
    private static final Logger logger = LoggerFactory.getLogger(KafkaContextPropagator.class);
    private static final String TRACE_HEADER = "traceId";
    private static final String SPAN_HEADER = "spanId";
    private static final String CONTEXT_PREFIX = "ctx-";

    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    public void sendWithContext(String topic, Object message) {
        sendWithContext(topic, null, message);
    }

    /**
     * Sends {@code message} with context headers attached.
     * BUG FIX: the previous version attached headers inside the send success
     * callback — i.e. after the record had already been serialized and handed
     * to the broker — so consumers never received them. Headers must be set on
     * the ProducerRecord BEFORE sending. (The old code also referenced an
     * undefined {@code logger}, now declared above.)
     */
    public void sendWithContext(String topic, String key, Object message) {
        DistributedContext context = DistributedContextHolder.getContext();
        ProducerRecord<String, Object> record = new ProducerRecord<>(topic, key, message);
        addContextToHeaders(record.headers(), context);
        kafkaTemplate.send(record).addCallback(
            result -> logger.debug("Sent message with context to topic: {}", topic),
            exception -> logger.error("Failed to send message to topic: {}", topic, exception)
        );
    }

    // FIX: parameter type is Headers (what record.headers() actually returns),
    // not the concrete RecordHeaders the old signature required.
    private void addContextToHeaders(Headers headers, DistributedContext context) {
        if (context != null) {
            // getBytes() uses the platform charset — presumably UTF-8; the
            // consumer side must decode with the same charset. TODO confirm.
            headers.add(TRACE_HEADER, context.getTraceId().getBytes());
            headers.add(SPAN_HEADER, context.getSpanId().getBytes());
            // Add baggage as headers; skip the reserved identity keys that
            // toMap() re-includes, so they are not duplicated as ctx-* baggage.
            context.toMap().forEach((key, value) -> {
                if (value != null && !isReservedKey(key)) {
                    headers.add(CONTEXT_PREFIX + key, value.toString().getBytes());
                }
            });
        }
    }

    // Identity keys that toMap() mixes into the baggage snapshot.
    private static boolean isReservedKey(String key) {
        return "traceId".equals(key) || "spanId".equals(key)
            || "parentSpanId".equals(key) || "version".equals(key);
    }
}
/**
 * Consumer-side configuration: a listener container factory whose record
 * interceptor rebuilds the distributed context from record headers before
 * each listener invocation.
 */
@Configuration
public class KafkaConsumerConfig {

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Object> contextAwareKafkaListenerContainerFactory(
            ConsumerFactory<String, Object> consumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, Object> factory =
            new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        factory.setRecordInterceptor(contextPropagationInterceptor());
        return factory;
    }

    /**
     * Binds the context extracted from record headers for the listener.
     * BUG FIX: the previous version cleared the context in a finally block
     * inside intercept() — i.e. BEFORE the listener ran — so the listener
     * never saw any context. The binding must survive until after the
     * listener; it is replaced when the next record is intercepted on the
     * same consumer thread.
     * NOTE(review): for strict per-record cleanup, implement
     * RecordInterceptor#afterRecord (spring-kafka 2.7+) and call
     * DistributedContextHolder.clearContext() there — confirm the
     * spring-kafka version in use.
     */
    @Bean
    public RecordInterceptor<String, Object> contextPropagationInterceptor() {
        return record -> {
            DistributedContextHolder.setContext(extractContextFromHeaders(record.headers()));
            return record;
        };
    }

    /** Rebuilds a context from traceId/spanId and ctx-* baggage headers; missing ids are generated. */
    private DistributedContext extractContextFromHeaders(Headers headers) {
        DefaultDistributedContext.Builder builder = new DefaultDistributedContext.Builder();
        Header traceHeader = headers.lastHeader("traceId");
        Header spanHeader = headers.lastHeader("spanId");
        if (traceHeader != null) {
            builder.traceId(new String(traceHeader.value()));
        }
        if (spanHeader != null) {
            builder.spanId(new String(spanHeader.value()));
        }
        // Extract baggage from headers
        for (Header header : headers) {
            if (header.key().startsWith("ctx-")) {
                String baggageKey = header.key().substring("ctx-".length());
                builder.addBaggage(baggageKey, new String(header.value()));
            }
        }
        return builder.build();
    }
}
Database Context Propagation
JPA/Hibernate Integration
/**
 * Stamps trace information onto entities before insert/update so persisted
 * rows can be correlated with the request that wrote them.
 */
// NOTE(review): Hibernate's PreInsertEvent/PreUpdateEvent are Hibernate event
// system events, NOT Spring application events — @EventListener will not
// receive them unless an integrator/bridge republishes them as Spring events.
// Confirm that such a bridge exists before relying on this class.
@Component
public class JpaContextPropagator {
@EventListener
public void handlePreInsert(PreInsertEvent event) {
injectContextFields(event.getEntity());
}
@EventListener
public void handlePreUpdate(PreUpdateEvent event) {
injectContextFields(event.getEntity());
}
// Copies the current trace id and full context snapshot onto entities that
// opt in by extending ContextAwareEntity; all other entities are untouched.
private void injectContextFields(Object entity) {
if (entity instanceof ContextAwareEntity) {
DistributedContext context = DistributedContextHolder.getContext();
if (context != null) {
((ContextAwareEntity) entity).setTraceId(context.getTraceId());
((ContextAwareEntity) entity).setContextData(context.toMap());
}
}
}
}
/**
 * Base class for entities that persist the distributed context alongside
 * their own columns: the trace id as a plain column and the full context
 * snapshot as JSON (via MapToJsonConverter).
 */
@MappedSuperclass
public abstract class ContextAwareEntity {
// Trace id of the request that last wrote this row.
@Column(name = "trace_id")
private String traceId;
// Full context snapshot (baggage + identity keys), stored as a JSON string.
@Column(name = "context_data")
@Convert(converter = MapToJsonConverter.class)
private Map<String, Object> contextData;
// Getters and setters
public String getTraceId() { return traceId; }
public void setTraceId(String traceId) { this.traceId = traceId; }
public Map<String, Object> getContextData() { return contextData; }
public void setContextData(Map<String, Object> contextData) {
this.contextData = contextData;
}
}
/**
 * JPA attribute converter persisting a {@code Map<String,Object>} as a JSON
 * string column. Null/empty maps round-trip through SQL NULL.
 */
@Converter
public class MapToJsonConverter implements AttributeConverter<Map<String, Object>, String> {
    // ObjectMapper is thread-safe for read/write and expensive to build — share one.
    private static final ObjectMapper mapper = new ObjectMapper();

    @Override
    public String convertToDatabaseColumn(Map<String, Object> attribute) {
        // FIX: a null attribute used to be serialized as the literal JSON text
        // "null"; store SQL NULL instead. Empty maps also map to NULL — the
        // read path below restores an empty map either way.
        if (attribute == null || attribute.isEmpty()) {
            return null;
        }
        try {
            return mapper.writeValueAsString(attribute);
        } catch (JsonProcessingException e) {
            throw new RuntimeException("Failed to convert map to JSON", e);
        }
    }

    @Override
    public Map<String, Object> convertToEntityAttribute(String dbData) {
        // NULL/blank columns read back as an empty, mutable map — never null.
        if (dbData == null || dbData.trim().isEmpty()) {
            return new HashMap<>();
        }
        try {
            return mapper.readValue(dbData,
                new TypeReference<Map<String, Object>>() {});
        } catch (JsonProcessingException e) {
            throw new RuntimeException("Failed to convert JSON to map", e);
        }
    }
}
Advanced Context Features
Context Scoping and Management
/**
 * Higher-level context operations: running tasks under an enriched context,
 * deriving child (span) contexts, and context-stamped logging.
 */
@Service
public class ContextManager {
    private static final Logger logger = LoggerFactory.getLogger(ContextManager.class);

    /** Runs {@code task} under the current context enriched with extra baggage, then restores. */
    public void runInContext(Runnable task, Map<String, Object> additionalBaggage) {
        DistributedContext currentContext = DistributedContextHolder.getContext();
        DistributedContextHolder.setContext(enhanceContext(currentContext, additionalBaggage));
        try {
            task.run();
        } finally {
            DistributedContextHolder.setContext(currentContext);
        }
    }

    /**
     * Value-returning variant of {@link #runInContext(Runnable, Map)}.
     * Checked exceptions from the task are wrapped in RuntimeException.
     */
    public <T> T runInContext(Callable<T> task, Map<String, Object> additionalBaggage) {
        DistributedContext currentContext = DistributedContextHolder.getContext();
        DistributedContextHolder.setContext(enhanceContext(currentContext, additionalBaggage));
        try {
            return task.call();
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            DistributedContextHolder.setContext(currentContext);
        }
    }

    /**
     * Derives a child context: same trace, parent span = current span,
     * fresh span id (generated by the builder), inherited baggage.
     */
    public DistributedContext createChildContext() {
        DistributedContext parentContext = DistributedContextHolder.getContext();
        return new DefaultDistributedContext.Builder()
            .traceId(parentContext.getTraceId())
            .parentSpanId(parentContext.getSpanId())
            .baggage(baggageOnly(parentContext))
            .build();
    }

    // Copies the current identity and baggage, then overlays extra baggage entries.
    private DistributedContext enhanceContext(DistributedContext context,
            Map<String, Object> additionalBaggage) {
        DefaultDistributedContext.Builder builder = new DefaultDistributedContext.Builder()
            .traceId(context.getTraceId())
            .spanId(context.getSpanId())
            .parentSpanId(context.getParentSpanId())
            .version(context.getVersion())
            .baggage(baggageOnly(context));
        if (additionalBaggage != null) {
            additionalBaggage.forEach(builder::addBaggage);
        }
        return builder.build();
    }

    /**
     * Baggage snapshot WITHOUT the reserved identity keys.
     * BUG FIX: the previous code fed context.toMap() straight into
     * Builder.baggage(), so traceId/spanId/parentSpanId/version were copied
     * into baggage and then leaked downstream as X-Baggage-* headers.
     */
    private static Map<String, Object> baggageOnly(DistributedContext context) {
        Map<String, Object> copy = new HashMap<>(context.toMap());
        copy.remove("traceId");
        copy.remove("spanId");
        copy.remove("parentSpanId");
        copy.remove("version");
        return copy;
    }

    /** Logs {@code message} prefixed with the current trace/span ids when available. */
    public void logContext(String message) {
        DistributedContext context = DistributedContextHolder.getContext();
        if (context != null) {
            logger.info("[Trace:{}][Span:{}] {}",
                context.getTraceId(),
                context.getSpanId(),
                message);
        } else {
            logger.info("{}", message);
        }
    }
}
Context Validation and Security
/**
 * Validation and redaction for distributed contexts: enforces presence of
 * trace identity and caps baggage size so propagation headers stay bounded.
 */
@Component
public class ContextValidator {

    /**
     * Validates structural requirements.
     * @throws InvalidContextException if identity is missing or baggage exceeds limits
     */
    public void validateContext(DistributedContext context) {
        if (context == null) {
            throw new InvalidContextException("Context cannot be null");
        }
        if (context.getTraceId() == null || context.getTraceId().trim().isEmpty()) {
            throw new InvalidContextException("Trace ID is required");
        }
        if (context.getSpanId() == null || context.getSpanId().trim().isEmpty()) {
            throw new InvalidContextException("Span ID is required");
        }
        // Validate baggage size to prevent header overflow
        Map<String, Object> baggage = context.toMap();
        if (baggage.size() > 100) {
            throw new InvalidContextException("Too much baggage in context");
        }
        // Cap each serialized value — these become individual HTTP headers.
        baggage.forEach((key, value) -> {
            if (value != null && value.toString().length() > 1024) {
                throw new InvalidContextException(
                    String.format("Baggage value for key '%s' is too large", key));
            }
        });
    }

    /**
     * Redacts sensitive baggage entries in place.
     * BUG FIX: the previous version mutated the map returned by toMap(), which
     * is a defensive copy — the real context was never redacted. Redaction now
     * writes through the context's own put().
     */
    public void sanitizeContext(DistributedContext context,
            List<String> sensitiveFields) {
        sensitiveFields.forEach(field -> {
            if (context.containsKey(field)) {
                context.put(field, "***REDACTED***");
            }
        });
    }
}
Testing Distributed Context
/**
 * Integration tests for context creation, thread-local binding, and
 * propagation across async boundaries.
 */
@SpringBootTest
@TestPropertySource(properties = {
    "distributed.context.enabled=true",
    "distributed.context.propagate-response-headers=true"
})
public class DistributedContextTest {

    @Autowired
    private DistributedContextFilter contextFilter;

    @Autowired
    private ContextPropagationInterceptor interceptor;

    /** Builder produces a context carrying the ids and baggage it was given. */
    @Test
    public void testContextCreation() {
        DistributedContext built = new DefaultDistributedContext.Builder()
            .traceId("test-trace-123")
            .spanId("test-span-456")
            .addBaggage("user", "test-user")
            .build();
        assertNotNull(built);
        assertEquals("test-trace-123", built.getTraceId());
        assertEquals("test-span-456", built.getSpanId());
        assertEquals("test-user", built.get("user"));
    }

    /** setContext binds for the thread; clearContext unbinds (a fresh context appears after). */
    @Test
    public void testThreadLocalPropagation() {
        DistributedContext bound = new DefaultDistributedContext.Builder().build();
        DistributedContextHolder.setContext(bound);
        assertEquals(bound, DistributedContextHolder.getContext());
        DistributedContextHolder.clearContext();
        assertNotEquals(bound, DistributedContextHolder.getContext());
    }

    /** wrap() carries the submitting thread's context onto the async worker. */
    @Test
    public void testAsyncContextPropagation() throws Exception {
        DistributedContext submitted = new DefaultDistributedContext.Builder()
            .traceId("async-trace")
            .build();
        DistributedContextHolder.setContext(submitted);
        CompletableFuture<String> observedTraceId = CompletableFuture.supplyAsync(
            DistributedContextHolder.wrap(
                () -> DistributedContextHolder.getContext().getTraceId())
        );
        assertEquals("async-trace", observedTraceId.get());
    }
}
Configuration
Application Properties
# application.yml
distributed:
context:
enabled: true
propagate-response-headers: true
propagate-headers:
- "X-Trace-Id"
- "X-Span-Id"
- "X-Parent-Span-Id"
- "X-Context-Version"
- "X-Baggage-*"
sensitive-fields:
- "password"
- "token"
- "authorization"
- "secret"
logging:
pattern:
level: "%5p [%X{traceId:-},%X{spanId:-}]"
Maven Dependencies
<dependencies> <!-- Spring Boot --> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-web</artifactId> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-data-jpa</artifactId> </dependency> <dependency> <groupId>org.springframework.kafka</groupId> <artifactId>spring-kafka</artifactId> </dependency> <!-- JSON Processing --> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> </dependency> </dependencies>
Conclusion
This distributed context implementation provides:
- Cross-Service Context Propagation - Maintain context across HTTP, messaging, and database boundaries
- Thread-Local Management - Automatic context handling with cleanup
- Async Support - Context propagation for CompletableFuture and reactive programming
- Integration Frameworks - Spring Boot, Kafka, JPA, and WebClient integration
- Security and Validation - Context validation and sensitive data handling
- Observability - MDC integration for structured logging
The system ensures that distributed tracing, user context, and business data are consistently available throughout the entire request lifecycle in microservices architectures.
Java Observability, Logging Intelligence & AI-Driven Monitoring (APM, Tracing, Logs & Anomaly Detection)
https://macronepal.com/blog/beyond-metrics-observing-serverless-and-traditional-java-applications-with-thundra-apm/
Explains using Thundra APM to observe both serverless and traditional Java applications by combining tracing, metrics, and logs into a unified observability platform for faster debugging and performance insights.
https://macronepal.com/blog/dynatrace-oneagent-in-java-2/
Explains Dynatrace OneAgent for Java, which automatically instruments JVM applications to capture metrics, traces, and logs, enabling full-stack monitoring and root-cause analysis with minimal configuration.
https://macronepal.com/blog/lightstep-java-sdk-distributed-tracing-and-observability-implementation/
Explains Lightstep Java SDK for distributed tracing, helping developers track requests across microservices and identify latency issues using OpenTelemetry-based observability.
https://macronepal.com/blog/honeycomb-io-beeline-for-java-complete-guide-2/
Explains Honeycomb Beeline for Java, which provides high-cardinality observability and deep query capabilities to understand complex system behavior and debug distributed systems efficiently.
https://macronepal.com/blog/lumigo-for-serverless-in-java-complete-distributed-tracing-guide-2/
Explains Lumigo for Java serverless applications, offering automatic distributed tracing, log correlation, and error tracking to simplify debugging in cloud-native environments. (Lumigo Docs)
https://macronepal.com/blog/from-noise-to-signals-implementing-log-anomaly-detection-in-java-applications/
Explains how to detect anomalies in Java logs using behavioral patterns and machine learning techniques to separate meaningful incidents from noisy log data and improve incident response.
https://macronepal.com/blog/ai-powered-log-analysis-in-java-from-reactive-debugging-to-proactive-insights/
Explains AI-driven log analysis for Java applications, shifting from manual debugging to predictive insights that identify issues early and improve system reliability using intelligent log processing.
https://macronepal.com/blog/titliel-java-logging-best-practices/
Explains best practices for Java logging, focusing on structured logs, proper log levels, performance optimization, and ensuring logs are useful for debugging and observability systems.
https://macronepal.com/blog/seeking-a-loguru-for-java-the-quest-for-elegant-and-simple-logging/
Explains the search for simpler, more elegant logging frameworks in Java, comparing modern logging approaches that aim to reduce complexity while improving readability and developer experience.