Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
51 commits
Select commit Hold shift + click to select a range
7224607
collection: Queue Instrumentation
adinauer Mar 31, 2026
76a907e
feat(core): Add enableQueueTracing option and messaging span data con…
adinauer Mar 31, 2026
5f063c1
changelog
adinauer Mar 31, 2026
f44c735
feat(samples): Add Kafka producer and consumer to Spring Boot 3 sampl…
adinauer Apr 1, 2026
be5af44
feat(spring-jakarta): Add Kafka producer instrumentation
adinauer Apr 1, 2026
5049ffc
changelog
adinauer Apr 1, 2026
6099047
feat(spring-jakarta): Add Kafka consumer instrumentation
adinauer Apr 1, 2026
1f00027
changelog
adinauer Apr 1, 2026
572dc2d
feat(spring-boot-jakarta): Add Kafka queue auto-configuration
adinauer Apr 1, 2026
04a4689
changelog
adinauer Apr 1, 2026
271fb8b
test(samples): Add Kafka queue system tests for Spring Boot 3
adinauer Apr 2, 2026
02f2007
docs: Add rule against force-pushing stack branches
adinauer Apr 2, 2026
6f90ea7
docs: Also prohibit --amend on stack branches
adinauer Apr 2, 2026
10a5c63
feat(samples): Add Kafka producer and consumer to Spring Boot 3 OTel …
adinauer Apr 7, 2026
ce2ea96
fix(spring-boot-jakarta): Disable Sentry Kafka instrumentation when O…
adinauer Apr 7, 2026
414b118
fix(core): Add Kafka span origins to ignored list for OpenTelemetry
adinauer Apr 7, 2026
915e42b
ref(spring-jakarta): Replace SentryKafkaProducerWrapper with SentryPr…
adinauer Apr 9, 2026
2b74bab
Merge branch 'feat/queue-instrumentation-producer' into feat/queue-in…
adinauer Apr 9, 2026
be3a2ba
fix(spring-jakarta): Update consumer references and add reflection wa…
adinauer Apr 9, 2026
ce87037
Merge branch 'feat/queue-instrumentation-consumer' into feat/queue-in…
adinauer Apr 9, 2026
5d297ac
Merge branch 'feat/queue-instrumentation-autoconfig' into feat/queue-…
adinauer Apr 9, 2026
4fa767e
Merge branch 'feat/queue-instrumentation-e2e' into feat/queue-instrum…
adinauer Apr 9, 2026
fb7e217
Merge branch 'feat/queue-instrumentation-otel-samples' into fix/queue…
adinauer Apr 9, 2026
fdb3a03
fix(spring-jakarta): Initialize Sentry in SentryProducerInterceptorTest
adinauer Apr 9, 2026
6450f63
Merge branch 'feat/queue-instrumentation-producer' into feat/queue-in…
adinauer Apr 9, 2026
f92f47c
fix(spring-jakarta): Initialize Sentry in consumer test, fix API file…
adinauer Apr 9, 2026
b8ead5f
Merge branch 'feat/queue-instrumentation-consumer' into feat/queue-in…
adinauer Apr 9, 2026
e607d67
Merge branch 'feat/queue-instrumentation-autoconfig' into feat/queue-…
adinauer Apr 9, 2026
cf9ba0c
Merge branch 'feat/queue-instrumentation-e2e' into feat/queue-instrum…
adinauer Apr 9, 2026
24348b2
Merge branch 'feat/queue-instrumentation-otel-samples' into fix/queue…
adinauer Apr 9, 2026
2501e57
fix(spring-jakarta): Clean up stale ThreadLocal context in Kafka cons…
adinauer Apr 10, 2026
320e805
fix(spring-jakarta): Fork root scopes and skip when OTel is active in…
adinauer Apr 10, 2026
6d91bdc
fix(spring-jakarta): Guard entire span lifecycle in Kafka producer in…
adinauer Apr 10, 2026
e86169f
fix(spring-jakarta): [Queue Instrumentation 12] Add Kafka retry count…
adinauer Apr 10, 2026
24cff6d
fix(spring-jakarta): [Queue Instrumentation 13] Align enqueue time wi…
adinauer Apr 10, 2026
ca69447
ref(kafka): Extract sentry-kafka module from spring-jakarta
adinauer Apr 13, 2026
0734938
changelog
adinauer Apr 13, 2026
703d631
Merge pull request #5250 from getsentry/feat/queue-instrumentation-op…
adinauer Apr 29, 2026
007d23f
Merge pull request #5253 from getsentry/feat/queue-instrumentation-sa…
adinauer Apr 29, 2026
9830c0c
Merge pull request #5254 from getsentry/feat/queue-instrumentation-pr…
adinauer Apr 29, 2026
a57d7aa
Merge pull request #5255 from getsentry/feat/queue-instrumentation-co…
adinauer Apr 29, 2026
46225de
Merge pull request #5259 from getsentry/feat/queue-instrumentation-au…
adinauer Apr 29, 2026
e3ca6ef
Merge pull request #5260 from getsentry/feat/queue-instrumentation-e2e
adinauer Apr 29, 2026
4b8feb0
Merge pull request #5265 from getsentry/feat/queue-instrumentation-ot…
adinauer Apr 29, 2026
ccd7c49
Merge pull request #5274 from getsentry/fix/queue-instrumentation-ote…
adinauer Apr 29, 2026
f5b6353
Merge pull request #5279 from getsentry/fix/queue-instrumentation-thr…
adinauer Apr 29, 2026
0e5c3c0
Merge pull request #5280 from getsentry/fix/queue-instrumentation-roo…
adinauer Apr 29, 2026
69f38d4
Merge pull request #5281 from getsentry/fix/queue-instrumentation-pro…
adinauer Apr 29, 2026
8355a1e
Merge pull request #5282 from getsentry/fix/queue-instrumentation-ret…
adinauer Apr 29, 2026
55c5586
Merge pull request #5283 from getsentry/fix/queue-instrumentation-enq…
adinauer Apr 29, 2026
592a210
Merge pull request #5288 from getsentry/feat/queue-instrumentation-ka…
adinauer Apr 29, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .cursor/rules/pr.mdc
Original file line number Diff line number Diff line change
Expand Up @@ -258,3 +258,5 @@ git push
**Never merge into the collection branch.** Syncing only happens between stack PR branches. The collection branch is untouched until the user merges PRs through GitHub.

Prefer merge over rebase — it preserves commit history, doesn't invalidate existing review comments, and avoids the need for force-pushing. Only rebase if explicitly requested.

**Never amend or force-push stack branches.** Do not use `git commit --amend`, `--force`, or `--force-with-lease` on branches that are part of a stack. Amending a pushed commit requires a force-push, which can cause GitHub to auto-merge or auto-close other PRs in the stack. If a commit needs fixing, add a new fixup commit instead.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@

### Features

- Add `sentry-kafka` module for Kafka queue instrumentation without Spring ([#5288](https://github.com/getsentry/sentry-java/pull/5288))
- Add Kafka queue tracing for Spring Boot 3 ([#5254](https://github.com/getsentry/sentry-java/pull/5254)), ([#5255](https://github.com/getsentry/sentry-java/pull/5255)), ([#5256](https://github.com/getsentry/sentry-java/pull/5256))
- Add `enableQueueTracing` option and messaging span data conventions ([#5250](https://github.com/getsentry/sentry-java/pull/5250))
- Prevent cross-organization trace continuation ([#5136](https://github.com/getsentry/sentry-java/pull/5136))
- By default, the SDK now extracts the organization ID from the DSN (e.g. `o123.ingest.sentry.io`) and compares it with the `sentry-org_id` value in incoming baggage headers. When the two differ, the SDK starts a fresh trace instead of continuing the foreign one. This guards against accidentally linking traces across organizations.
- New option `enableStrictTraceContinuation` (default `false`): when enabled, both the SDK's org ID **and** the incoming baggage org ID must be present and match for a trace to be continued. Traces with a missing org ID on either side are rejected. Configurable via code (`setStrictTraceContinuation(true)`), `sentry.properties` (`enable-strict-trace-continuation=true`), Android manifest (`io.sentry.strict-trace-continuation.enabled`), or Spring Boot (`sentry.strict-trace-continuation=true`).
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ Sentry SDK for Java and Android
| sentry | ![Maven Central Version](https://img.shields.io/maven-central/v/io.sentry/sentry?style=for-the-badge&logo=sentry&color=green) | 21 |
| sentry-jul | ![Maven Central Version](https://img.shields.io/maven-central/v/io.sentry/sentry-jul?style=for-the-badge&logo=sentry&color=green) |
| sentry-jdbc | ![Maven Central Version](https://img.shields.io/maven-central/v/io.sentry/sentry-jdbc?style=for-the-badge&logo=sentry&color=green) |
| sentry-kafka | ![Maven Central Version](https://img.shields.io/maven-central/v/io.sentry/sentry-kafka?style=for-the-badge&logo=sentry&color=green) |
| sentry-apollo | ![Maven Central Version](https://img.shields.io/maven-central/v/io.sentry/sentry-apollo?style=for-the-badge&logo=sentry&color=green) | 21 |
| sentry-apollo-3 | ![Maven Central Version](https://img.shields.io/maven-central/v/io.sentry/sentry-apollo-3?style=for-the-badge&logo=sentry&color=green) | 21 |
| sentry-apollo-4 | ![Maven Central Version](https://img.shields.io/maven-central/v/io.sentry/sentry-apollo-4?style=for-the-badge&logo=sentry&color=green) | 21 |
Expand Down
1 change: 1 addition & 0 deletions buildSrc/src/main/java/Config.kt
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ object Config {
val SENTRY_JCACHE_SDK_NAME = "$SENTRY_JAVA_SDK_NAME.jcache"
val SENTRY_QUARTZ_SDK_NAME = "$SENTRY_JAVA_SDK_NAME.quartz"
val SENTRY_JDBC_SDK_NAME = "$SENTRY_JAVA_SDK_NAME.jdbc"
val SENTRY_KAFKA_SDK_NAME = "$SENTRY_JAVA_SDK_NAME.kafka"
val SENTRY_OPENFEATURE_SDK_NAME = "$SENTRY_JAVA_SDK_NAME.openfeature"
val SENTRY_LAUNCHDARKLY_SERVER_SDK_NAME = "$SENTRY_JAVA_SDK_NAME.launchdarkly-server"
val SENTRY_LAUNCHDARKLY_ANDROID_SDK_NAME = "$SENTRY_ANDROID_SDK_NAME.launchdarkly"
Expand Down
2 changes: 2 additions & 0 deletions gradle/libs.versions.toml
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,8 @@ springboot3-starter-security = { module = "org.springframework.boot:spring-boot-
springboot3-starter-jdbc = { module = "org.springframework.boot:spring-boot-starter-jdbc", version.ref = "springboot3" }
springboot3-starter-actuator = { module = "org.springframework.boot:spring-boot-starter-actuator", version.ref = "springboot3" }
springboot3-starter-cache = { module = "org.springframework.boot:spring-boot-starter-cache", version.ref = "springboot3" }
spring-kafka3 = { module = "org.springframework.kafka:spring-kafka", version = "3.3.5" }
kafka-clients = { module = "org.apache.kafka:kafka-clients", version = "3.8.1" }
springboot4-otel = { module = "io.opentelemetry.instrumentation:opentelemetry-spring-boot-starter", version.ref = "otelInstrumentation" }
springboot4-resttestclient = { module = "org.springframework.boot:spring-boot-resttestclient", version.ref = "springboot4" }
springboot4-starter = { module = "org.springframework.boot:spring-boot-starter", version.ref = "springboot4" }
Expand Down
5 changes: 5 additions & 0 deletions sentry-kafka/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# sentry-kafka

This module provides Kafka-native queue instrumentation for applications using `kafka-clients` directly.

Spring users should use `sentry-spring-boot-jakarta` / `sentry-spring-jakarta`, which provide higher-fidelity consumer instrumentation via Spring Kafka hooks.
25 changes: 25 additions & 0 deletions sentry-kafka/api/sentry-kafka.api
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
public final class io/sentry/kafka/BuildConfig {
public static final field SENTRY_KAFKA_SDK_NAME Ljava/lang/String;
public static final field VERSION_NAME Ljava/lang/String;
}

public final class io/sentry/kafka/SentryKafkaConsumerInterceptor : org/apache/kafka/clients/consumer/ConsumerInterceptor {
public static final field TRACE_ORIGIN Ljava/lang/String;
public fun <init> (Lio/sentry/IScopes;)V
public fun close ()V
public fun configure (Ljava/util/Map;)V
public fun onCommit (Ljava/util/Map;)V
public fun onConsume (Lorg/apache/kafka/clients/consumer/ConsumerRecords;)Lorg/apache/kafka/clients/consumer/ConsumerRecords;
}

public final class io/sentry/kafka/SentryKafkaProducerInterceptor : org/apache/kafka/clients/producer/ProducerInterceptor {
public static final field SENTRY_ENQUEUED_TIME_HEADER Ljava/lang/String;
public static final field TRACE_ORIGIN Ljava/lang/String;
public fun <init> (Lio/sentry/IScopes;)V
public fun <init> (Lio/sentry/IScopes;Ljava/lang/String;)V
public fun close ()V
public fun configure (Ljava/util/Map;)V
public fun onAcknowledgement (Lorg/apache/kafka/clients/producer/RecordMetadata;Ljava/lang/Exception;)V
public fun onSend (Lorg/apache/kafka/clients/producer/ProducerRecord;)Lorg/apache/kafka/clients/producer/ProducerRecord;
}

83 changes: 83 additions & 0 deletions sentry-kafka/build.gradle.kts
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
import net.ltgt.gradle.errorprone.errorprone
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

// Build script for the sentry-kafka module: plain java-library + Kotlin (tests),
// with Error Prone/NullAway static analysis and JaCoCo coverage gates.
plugins {
`java-library`
id("io.sentry.javadoc")
alias(libs.plugins.kotlin.jvm)
jacoco
alias(libs.plugins.errorprone)
alias(libs.plugins.gradle.versions)
alias(libs.plugins.buildconfig)
}

// Keep Kotlin (test) bytecode at 1.8 to match the module's Java compatibility target.
tasks.withType<KotlinCompile>().configureEach {
compilerOptions.jvmTarget = org.jetbrains.kotlin.gradle.dsl.JvmTarget.JVM_1_8
}

dependencies {
api(projects.sentry)
// kafka-clients is compileOnly: the consumer app provides it at runtime,
// so this module adds no transitive Kafka dependency.
compileOnly(libs.kafka.clients)
compileOnly(libs.jetbrains.annotations)
compileOnly(libs.nopen.annotations)

errorprone(libs.errorprone.core)
errorprone(libs.nopen.checker)
errorprone(libs.nullaway)

// tests
testImplementation(projects.sentryTestSupport)
testImplementation(kotlin(Config.kotlinStdLib))
testImplementation(libs.kotlin.test.junit)
testImplementation(libs.mockito.kotlin)
testImplementation(libs.mockito.inline)
testImplementation(libs.kafka.clients)
}

configure<SourceSetContainer> { test { java.srcDir("src/test/java") } }

jacoco { toolVersion = libs.versions.jacoco.get() }

// Coverage report is XML-only (consumed by CI tooling); no HTML output.
tasks.jacocoTestReport {
reports {
xml.required.set(true)
html.required.set(false)
}
}

// Fail `check` when coverage drops below the shared project minimum.
tasks {
jacocoTestCoverageVerification {
violationRules { rule { limit { minimum = Config.QualityPlugins.Jacoco.minimumCoverage } } }
}
check {
dependsOn(jacocoTestCoverageVerification)
dependsOn(jacocoTestReport)
}
}

// NullAway runs as an error (not warning) for all io.sentry packages.
tasks.withType<JavaCompile>().configureEach {
options.errorprone {
check("NullAway", net.ltgt.gradle.errorprone.CheckSeverity.ERROR)
option("NullAway:AnnotatedPackages", "io.sentry")
}
}

// Generates io.sentry.kafka.BuildConfig with the SDK name and version,
// matching the constants exposed in sentry-kafka.api.
buildConfig {
useJavaOutput()
packageName("io.sentry.kafka")
buildConfigField("String", "SENTRY_KAFKA_SDK_NAME", "\"${Config.Sentry.SENTRY_KAFKA_SDK_NAME}\"")
buildConfigField("String", "VERSION_NAME", "\"${project.version}\"")
}

// Standard Sentry manifest attributes so the jar self-identifies its SDK name/version.
tasks.jar {
manifest {
attributes(
"Sentry-Version-Name" to project.version,
"Sentry-SDK-Name" to Config.Sentry.SENTRY_KAFKA_SDK_NAME,
"Sentry-SDK-Package-Name" to "maven:io.sentry:sentry-kafka",
"Implementation-Vendor" to "Sentry",
"Implementation-Title" to project.name,
"Implementation-Version" to project.version,
)
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
package io.sentry.kafka;

import io.sentry.BaggageHeader;
import io.sentry.IScopes;
import io.sentry.ITransaction;
import io.sentry.SentryTraceHeader;
import io.sentry.SpanDataConvention;
import io.sentry.SpanStatus;
import io.sentry.TransactionContext;
import io.sentry.TransactionOptions;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerInterceptor;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.header.Header;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

@ApiStatus.Internal
public final class SentryKafkaConsumerInterceptor<K, V> implements ConsumerInterceptor<K, V> {

  public static final @NotNull String TRACE_ORIGIN = "auto.queue.kafka.consumer";

  private final @NotNull IScopes scopes;

  public SentryKafkaConsumerInterceptor(final @NotNull IScopes scopes) {
    this.scopes = scopes;
  }

  /**
   * Creates and immediately finishes a {@code queue.receive} transaction for each non-empty batch
   * of polled records, continuing the trace from the first record's {@code sentry-trace}/{@code
   * baggage} headers when present.
   *
   * <p>Note: only the FIRST record of the batch is used for trace continuation; the batch size is
   * reported via {@code messaging.batch.message.count}.
   *
   * @param records the records returned by the poll; always returned unchanged
   * @return the same {@code records} instance, never modified
   */
  @Override
  public @NotNull ConsumerRecords<K, V> onConsume(final @NotNull ConsumerRecords<K, V> records) {
    if (!scopes.getOptions().isEnableQueueTracing() || records.isEmpty()) {
      return records;
    }

    final @NotNull ConsumerRecord<K, V> firstRecord = records.iterator().next();

    try {
      final @Nullable TransactionContext continued = continueTrace(firstRecord);
      final @NotNull TransactionContext txContext =
          continued != null ? continued : new TransactionContext("queue.receive", "queue.receive");
      // continueTrace may return a context with a foreign name/op; normalize both.
      txContext.setName("queue.receive");
      txContext.setOperation("queue.receive");

      final @NotNull TransactionOptions txOptions = new TransactionOptions();
      txOptions.setOrigin(TRACE_ORIGIN);
      // Not bound to the scope: this transaction is a point-in-time marker, not an ambient parent.
      txOptions.setBindToScope(false);

      final @NotNull ITransaction transaction = scopes.startTransaction(txContext, txOptions);
      if (!transaction.isNoOp()) {
        try {
          transaction.setData(SpanDataConvention.MESSAGING_SYSTEM, "kafka");
          transaction.setData(SpanDataConvention.MESSAGING_DESTINATION_NAME, firstRecord.topic());
          transaction.setData("messaging.batch.message.count", records.count());
          transaction.setStatus(SpanStatus.OK);
        } finally {
          // Always finish a started transaction, even if setting data/status throws;
          // otherwise it would leak unfinished and only the outer catch would run.
          transaction.finish();
        }
      }
    } catch (Throwable ignored) {
      // Instrumentation must never break the customer's Kafka poll loop.
    }

    return records;
  }

  @Override
  public void onCommit(final @NotNull Map<TopicPartition, OffsetAndMetadata> offsets) {}

  @Override
  public void close() {}

  @Override
  public void configure(final @Nullable Map<String, ?> configs) {}

  /**
   * Builds a {@link TransactionContext} that continues the incoming trace from the record's
   * headers, or returns {@code null} when no continuation is possible.
   */
  private @Nullable TransactionContext continueTrace(final @NotNull ConsumerRecord<K, V> record) {
    final @Nullable String sentryTrace = headerValue(record, SentryTraceHeader.SENTRY_TRACE_HEADER);
    final @Nullable String baggage = headerValue(record, BaggageHeader.BAGGAGE_HEADER);
    final @Nullable List<String> baggageHeaders =
        baggage != null ? Collections.singletonList(baggage) : null;
    return scopes.continueTrace(sentryTrace, baggageHeaders);
  }

  /** Returns the UTF-8 decoded value of the last header with the given name, or {@code null}. */
  private @Nullable String headerValue(
      final @NotNull ConsumerRecord<K, V> record, final @NotNull String headerName) {
    final @Nullable Header header = record.headers().lastHeader(headerName);
    if (header == null || header.value() == null) {
      return null;
    }
    return new String(header.value(), StandardCharsets.UTF_8);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
package io.sentry.kafka;

import io.sentry.BaggageHeader;
import io.sentry.DateUtils;
import io.sentry.IScopes;
import io.sentry.ISpan;
import io.sentry.SentryTraceHeader;
import io.sentry.SpanDataConvention;
import io.sentry.SpanOptions;
import io.sentry.SpanStatus;
import io.sentry.util.TracingUtils;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerInterceptor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.header.Headers;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

@ApiStatus.Internal
public final class SentryKafkaProducerInterceptor<K, V> implements ProducerInterceptor<K, V> {

  public static final @NotNull String TRACE_ORIGIN = "auto.queue.kafka.producer";
  public static final @NotNull String SENTRY_ENQUEUED_TIME_HEADER = "sentry-task-enqueued-time";

  private final @NotNull IScopes scopes;
  private final @NotNull String traceOrigin;

  public SentryKafkaProducerInterceptor(final @NotNull IScopes scopes) {
    this(scopes, TRACE_ORIGIN);
  }

  /**
   * @param scopes the Sentry scopes used for span creation and tracing-header generation
   * @param traceOrigin origin assigned to created spans (lets wrappers override the default)
   */
  public SentryKafkaProducerInterceptor(
      final @NotNull IScopes scopes, final @NotNull String traceOrigin) {
    this.scopes = scopes;
    this.traceOrigin = traceOrigin;
  }

  /**
   * Starts a {@code queue.publish} child span of the currently active span and injects {@code
   * sentry-trace}, {@code baggage} and enqueue-time headers into the outgoing record.
   *
   * <p>No-op when queue tracing is disabled or when there is no active span to attach to.
   *
   * @param record the record about to be sent; headers may be mutated, the instance is returned
   * @return the same {@code record} instance
   */
  @Override
  public @NotNull ProducerRecord<K, V> onSend(final @NotNull ProducerRecord<K, V> record) {
    if (!scopes.getOptions().isEnableQueueTracing()) {
      return record;
    }

    final @Nullable ISpan activeSpan = scopes.getSpan();
    if (activeSpan == null || activeSpan.isNoOp()) {
      return record;
    }

    try {
      final @NotNull SpanOptions spanOptions = new SpanOptions();
      spanOptions.setOrigin(traceOrigin);
      final @NotNull ISpan span =
          activeSpan.startChild("queue.publish", record.topic(), spanOptions);
      if (!span.isNoOp()) {
        try {
          span.setData(SpanDataConvention.MESSAGING_SYSTEM, "kafka");
          span.setData(SpanDataConvention.MESSAGING_DESTINATION_NAME, record.topic());

          injectHeaders(record.headers(), span);

          span.setStatus(SpanStatus.OK);
        } finally {
          // Always finish a started span, even if data/header injection throws;
          // otherwise the child span would leak unfinished on the parent.
          span.finish();
        }
      }
    } catch (Throwable ignored) {
      // Instrumentation must never break the customer's Kafka send.
    }

    return record;
  }

  @Override
  public void onAcknowledgement(
      final @Nullable RecordMetadata metadata, final @Nullable Exception exception) {}

  @Override
  public void close() {}

  @Override
  public void configure(final @Nullable Map<String, ?> configs) {}

  /**
   * Replaces (remove-then-add) the Sentry tracing headers and the enqueued-time header on the
   * outgoing record so retries/re-sends do not accumulate duplicates.
   */
  private void injectHeaders(final @NotNull Headers headers, final @NotNull ISpan span) {
    final @Nullable TracingUtils.TracingHeaders tracingHeaders =
        TracingUtils.trace(scopes, null, span);
    if (tracingHeaders != null) {
      final @NotNull SentryTraceHeader sentryTraceHeader = tracingHeaders.getSentryTraceHeader();
      headers.remove(sentryTraceHeader.getName());
      headers.add(
          sentryTraceHeader.getName(),
          sentryTraceHeader.getValue().getBytes(StandardCharsets.UTF_8));

      final @Nullable BaggageHeader baggageHeader = tracingHeaders.getBaggageHeader();
      if (baggageHeader != null) {
        headers.remove(baggageHeader.getName());
        headers.add(
            baggageHeader.getName(), baggageHeader.getValue().getBytes(StandardCharsets.UTF_8));
      }
    }

    // Enqueue time is recorded in seconds (as a decimal string) so consumers can compute latency.
    headers.remove(SENTRY_ENQUEUED_TIME_HEADER);
    headers.add(
        SENTRY_ENQUEUED_TIME_HEADER,
        String.valueOf(DateUtils.millisToSeconds(System.currentTimeMillis()))
            .getBytes(StandardCharsets.UTF_8));
  }
}
Loading