From 9616520ce46608da67303faa3e31fc8b07c07c21 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Thu, 12 Jun 2025 11:32:48 +0200
Subject: [PATCH 1/9] chore(ci): bump version to 2.0.0 (#1876)
Co-authored-by: Powertools for AWS Lambda (Java) Bot <151832416+aws-powertools-bot@users.noreply.github.com>
---
README.md | 6 +++---
examples/pom.xml | 2 +-
examples/powertools-examples-batch/pom.xml | 2 +-
examples/powertools-examples-cloudformation/pom.xml | 2 +-
examples/powertools-examples-core-utilities/cdk/app/pom.xml | 2 +-
.../powertools-examples-core-utilities/cdk/infra/pom.xml | 2 +-
.../powertools-examples-core-utilities/gradle/build.gradle | 6 +++---
.../kotlin/build.gradle.kts | 6 +++---
.../powertools-examples-core-utilities/sam-graalvm/pom.xml | 2 +-
examples/powertools-examples-core-utilities/sam/pom.xml | 2 +-
.../powertools-examples-core-utilities/serverless/pom.xml | 2 +-
.../powertools-examples-core-utilities/terraform/pom.xml | 2 +-
examples/powertools-examples-idempotency/pom.xml | 2 +-
examples/powertools-examples-parameters/sam-graalvm/pom.xml | 2 +-
examples/powertools-examples-parameters/sam/pom.xml | 2 +-
examples/powertools-examples-serialization/pom.xml | 2 +-
examples/powertools-examples-validation/pom.xml | 2 +-
mkdocs.yml | 2 +-
pom.xml | 2 +-
powertools-batch/pom.xml | 2 +-
powertools-cloudformation/pom.xml | 2 +-
powertools-common/pom.xml | 2 +-
powertools-e2e-tests/pom.xml | 2 +-
powertools-idempotency/pom.xml | 2 +-
powertools-idempotency/powertools-idempotency-core/pom.xml | 2 +-
.../powertools-idempotency-dynamodb/pom.xml | 2 +-
powertools-large-messages/pom.xml | 2 +-
powertools-logging/pom.xml | 2 +-
powertools-logging/powertools-logging-log4j/pom.xml | 2 +-
powertools-logging/powertools-logging-logback/pom.xml | 2 +-
powertools-metrics/pom.xml | 2 +-
powertools-parameters/pom.xml | 2 +-
.../powertools-parameters-appconfig/pom.xml | 2 +-
.../powertools-parameters-dynamodb/pom.xml | 2 +-
powertools-parameters/powertools-parameters-secrets/pom.xml | 2 +-
powertools-parameters/powertools-parameters-ssm/pom.xml | 2 +-
powertools-parameters/powertools-parameters-tests/pom.xml | 2 +-
powertools-serialization/pom.xml | 2 +-
powertools-tracing/pom.xml | 2 +-
powertools-validation/pom.xml | 2 +-
40 files changed, 46 insertions(+), 46 deletions(-)
diff --git a/README.md b/README.md
index 625d3d009..88955bca7 100644
--- a/README.md
+++ b/README.md
@@ -22,17 +22,17 @@ Powertools for AWS Lambda (Java) is available in Maven Central. You can use your
software.amazon.lambdapowertools-tracing
- 2.0.0-RC1
+ 2.0.0software.amazon.lambdapowertools-logging
- 2.0.0-RC1
+ 2.0.0software.amazon.lambdapowertools-metrics
- 2.0.0-RC1
+ 2.0.0
...
diff --git a/examples/pom.xml b/examples/pom.xml
index 3462be0b7..6bedc015e 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -20,7 +20,7 @@
software.amazon.lambdapowertools-examples
- 2.0.0-RC1
+ 2.0.0pomPowertools for AWS Lambda (Java) - Examples
diff --git a/examples/powertools-examples-batch/pom.xml b/examples/powertools-examples-batch/pom.xml
index 275a3c96f..ef3c0e4e0 100644
--- a/examples/powertools-examples-batch/pom.xml
+++ b/examples/powertools-examples-batch/pom.xml
@@ -5,7 +5,7 @@
4.0.0software.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-batchjarPowertools for AWS Lambda (Java) - Examples - Batch
diff --git a/examples/powertools-examples-cloudformation/pom.xml b/examples/powertools-examples-cloudformation/pom.xml
index edadd6005..43c81b9f8 100644
--- a/examples/powertools-examples-cloudformation/pom.xml
+++ b/examples/powertools-examples-cloudformation/pom.xml
@@ -3,7 +3,7 @@
4.0.0software.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-cloudformationjar
diff --git a/examples/powertools-examples-core-utilities/cdk/app/pom.xml b/examples/powertools-examples-core-utilities/cdk/app/pom.xml
index 11d5497da..fa5a1927a 100644
--- a/examples/powertools-examples-core-utilities/cdk/app/pom.xml
+++ b/examples/powertools-examples-core-utilities/cdk/app/pom.xml
@@ -6,7 +6,7 @@
software.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-core-utilities-cdkjar
diff --git a/examples/powertools-examples-core-utilities/cdk/infra/pom.xml b/examples/powertools-examples-core-utilities/cdk/infra/pom.xml
index 0b67f20ae..b869a5672 100644
--- a/examples/powertools-examples-core-utilities/cdk/infra/pom.xml
+++ b/examples/powertools-examples-core-utilities/cdk/infra/pom.xml
@@ -4,7 +4,7 @@
4.0.0software.amazon.lambda.examplescdk
- 2.0.0-RC1
+ 2.0.0UTF-82.162.1
diff --git a/examples/powertools-examples-core-utilities/gradle/build.gradle b/examples/powertools-examples-core-utilities/gradle/build.gradle
index bf53c31eb..4cf988a6f 100644
--- a/examples/powertools-examples-core-utilities/gradle/build.gradle
+++ b/examples/powertools-examples-core-utilities/gradle/build.gradle
@@ -29,8 +29,8 @@ dependencies {
implementation 'com.amazonaws:aws-lambda-java-events:3.11.0'
implementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.2'
implementation 'org.aspectj:aspectjrt:1.9.20.1'
- aspect 'software.amazon.lambda:powertools-tracing:2.0.0-RC1'
- aspect 'software.amazon.lambda:powertools-logging-log4j:2.0.0-RC1'
- aspect 'software.amazon.lambda:powertools-metrics:2.0.0-RC1'
+ aspect 'software.amazon.lambda:powertools-tracing:2.0.0'
+ aspect 'software.amazon.lambda:powertools-logging-log4j:2.0.0'
+ aspect 'software.amazon.lambda:powertools-metrics:2.0.0'
}
diff --git a/examples/powertools-examples-core-utilities/kotlin/build.gradle.kts b/examples/powertools-examples-core-utilities/kotlin/build.gradle.kts
index 9dcac3510..7029dc458 100644
--- a/examples/powertools-examples-core-utilities/kotlin/build.gradle.kts
+++ b/examples/powertools-examples-core-utilities/kotlin/build.gradle.kts
@@ -15,9 +15,9 @@ dependencies {
implementation("com.amazonaws:aws-lambda-java-events:3.11.3")
implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.15.2")
implementation("org.aspectj:aspectjrt:1.9.20.1")
- aspect("software.amazon.lambda:powertools-tracing:2.0.0-RC1")
- aspect("software.amazon.lambda:powertools-logging-log4j:2.0.0-RC1")
- aspect("software.amazon.lambda:powertools-metrics:2.0.0-RC1")
+ aspect("software.amazon.lambda:powertools-tracing:2.0.0")
+ aspect("software.amazon.lambda:powertools-logging-log4j:2.0.0")
+ aspect("software.amazon.lambda:powertools-metrics:2.0.0")
implementation("org.jetbrains.kotlin:kotlin-stdlib:1.9.24")
}
diff --git a/examples/powertools-examples-core-utilities/sam-graalvm/pom.xml b/examples/powertools-examples-core-utilities/sam-graalvm/pom.xml
index 011a29c60..b2353b86e 100644
--- a/examples/powertools-examples-core-utilities/sam-graalvm/pom.xml
+++ b/examples/powertools-examples-core-utilities/sam-graalvm/pom.xml
@@ -4,7 +4,7 @@
Powertools for AWS Lambda (Java) - Examples - Core Utilities (logging, tracing, metrics) with SAM GraalVMsoftware.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-core-utilities-sam-graalvmjar
diff --git a/examples/powertools-examples-core-utilities/sam/pom.xml b/examples/powertools-examples-core-utilities/sam/pom.xml
index d43094922..813fc267f 100644
--- a/examples/powertools-examples-core-utilities/sam/pom.xml
+++ b/examples/powertools-examples-core-utilities/sam/pom.xml
@@ -4,7 +4,7 @@
Powertools for AWS Lambda (Java) - Examples - Core Utilities (logging, tracing, metrics) with SAMsoftware.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-core-utilities-samjar
diff --git a/examples/powertools-examples-core-utilities/serverless/pom.xml b/examples/powertools-examples-core-utilities/serverless/pom.xml
index fce13b792..1aea70820 100644
--- a/examples/powertools-examples-core-utilities/serverless/pom.xml
+++ b/examples/powertools-examples-core-utilities/serverless/pom.xml
@@ -4,7 +4,7 @@
Powertools for AWS Lambda (Java) - Examples - Core Utilities (logging, tracing, metrics) with Serverlesssoftware.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-core-utilities-serverlessjar
diff --git a/examples/powertools-examples-core-utilities/terraform/pom.xml b/examples/powertools-examples-core-utilities/terraform/pom.xml
index dd9ed4685..dd4c385c0 100644
--- a/examples/powertools-examples-core-utilities/terraform/pom.xml
+++ b/examples/powertools-examples-core-utilities/terraform/pom.xml
@@ -4,7 +4,7 @@
Powertools for AWS Lambda (Java) - Examples - Core Utilities (logging, tracing, metrics) with Terraformsoftware.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-core-utilities-terraformjar
diff --git a/examples/powertools-examples-idempotency/pom.xml b/examples/powertools-examples-idempotency/pom.xml
index f0fcf545a..c7ceabc57 100644
--- a/examples/powertools-examples-idempotency/pom.xml
+++ b/examples/powertools-examples-idempotency/pom.xml
@@ -17,7 +17,7 @@
4.0.0software.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-idempotencyjarPowertools for AWS Lambda (Java) - Examples - Idempotency
diff --git a/examples/powertools-examples-parameters/sam-graalvm/pom.xml b/examples/powertools-examples-parameters/sam-graalvm/pom.xml
index 8031710b1..f2ce0f21f 100644
--- a/examples/powertools-examples-parameters/sam-graalvm/pom.xml
+++ b/examples/powertools-examples-parameters/sam-graalvm/pom.xml
@@ -2,7 +2,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-parameters-sam-graalvmjarPowertools for AWS Lambda (Java) - Examples - Parameters GraalVM
diff --git a/examples/powertools-examples-parameters/sam/pom.xml b/examples/powertools-examples-parameters/sam/pom.xml
index 02b991d68..a797bbeed 100644
--- a/examples/powertools-examples-parameters/sam/pom.xml
+++ b/examples/powertools-examples-parameters/sam/pom.xml
@@ -2,7 +2,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-parameters-samjarPowertools for AWS Lambda (Java) - Examples - Parameters
diff --git a/examples/powertools-examples-serialization/pom.xml b/examples/powertools-examples-serialization/pom.xml
index 560c08a56..b96d02b6b 100644
--- a/examples/powertools-examples-serialization/pom.xml
+++ b/examples/powertools-examples-serialization/pom.xml
@@ -2,7 +2,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-serializationjarPowertools for AWS Lambda (Java) - Examples - Serialization
diff --git a/examples/powertools-examples-validation/pom.xml b/examples/powertools-examples-validation/pom.xml
index 8bed7cb5e..9b6df9783 100644
--- a/examples/powertools-examples-validation/pom.xml
+++ b/examples/powertools-examples-validation/pom.xml
@@ -16,7 +16,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0-RC1
+ 2.0.0powertools-examples-validationjarPowertools for AWS Lambda (Java) - Examples - Validation
diff --git a/mkdocs.yml b/mkdocs.yml
index 6f185584b..ee6bd7322 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -118,7 +118,7 @@ extra_javascript:
extra:
powertools:
- version: 2.0.0-RC1
+ version: 2.0.0
repo_url: https://github.com/aws-powertools/powertools-lambda-java
edit_uri: edit/main/docs
diff --git a/pom.xml b/pom.xml
index af17d6b1b..951e155f6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -20,7 +20,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0pomPowertools for AWS Lambda (Java) - Parent
diff --git a/powertools-batch/pom.xml b/powertools-batch/pom.xml
index 20dbd179b..59e5b8c93 100644
--- a/powertools-batch/pom.xml
+++ b/powertools-batch/pom.xml
@@ -6,7 +6,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0A suite of utilities that makes batch message processing using AWS Lambda easier.
diff --git a/powertools-cloudformation/pom.xml b/powertools-cloudformation/pom.xml
index e3ab012ae..271704dea 100644
--- a/powertools-cloudformation/pom.xml
+++ b/powertools-cloudformation/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0Powertools for AWS Lambda (Java) - Cloudformation
diff --git a/powertools-common/pom.xml b/powertools-common/pom.xml
index b9a8572ea..dd6ef8e61 100644
--- a/powertools-common/pom.xml
+++ b/powertools-common/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0Powertools for AWS Lambda (Java) - Common Internal Utilities
diff --git a/powertools-e2e-tests/pom.xml b/powertools-e2e-tests/pom.xml
index ce9e12860..4f1c059e9 100644
--- a/powertools-e2e-tests/pom.xml
+++ b/powertools-e2e-tests/pom.xml
@@ -20,7 +20,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0powertools-e2e-tests
diff --git a/powertools-idempotency/pom.xml b/powertools-idempotency/pom.xml
index 286ea3f97..862cf3160 100644
--- a/powertools-idempotency/pom.xml
+++ b/powertools-idempotency/pom.xml
@@ -21,7 +21,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0powertools-idempotency
diff --git a/powertools-idempotency/powertools-idempotency-core/pom.xml b/powertools-idempotency/powertools-idempotency-core/pom.xml
index 3012406af..58d184fc5 100644
--- a/powertools-idempotency/powertools-idempotency-core/pom.xml
+++ b/powertools-idempotency/powertools-idempotency-core/pom.xml
@@ -21,7 +21,7 @@
software.amazon.lambdapowertools-idempotency
- 2.0.0-RC1
+ 2.0.0powertools-idempotency-core
diff --git a/powertools-idempotency/powertools-idempotency-dynamodb/pom.xml b/powertools-idempotency/powertools-idempotency-dynamodb/pom.xml
index e985f8f14..b92d66dbc 100644
--- a/powertools-idempotency/powertools-idempotency-dynamodb/pom.xml
+++ b/powertools-idempotency/powertools-idempotency-dynamodb/pom.xml
@@ -21,7 +21,7 @@
software.amazon.lambdapowertools-idempotency
- 2.0.0-RC1
+ 2.0.0powertools-idempotency-dynamodb
diff --git a/powertools-large-messages/pom.xml b/powertools-large-messages/pom.xml
index 9569c9ea8..b23e3f41c 100644
--- a/powertools-large-messages/pom.xml
+++ b/powertools-large-messages/pom.xml
@@ -23,7 +23,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0powertools-large-messages
diff --git a/powertools-logging/pom.xml b/powertools-logging/pom.xml
index ade7c95fc..75d5853e5 100644
--- a/powertools-logging/pom.xml
+++ b/powertools-logging/pom.xml
@@ -21,7 +21,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0Powertools for AWS Lambda (Java) - Logging
diff --git a/powertools-logging/powertools-logging-log4j/pom.xml b/powertools-logging/powertools-logging-log4j/pom.xml
index e85fc6567..75aa94a97 100644
--- a/powertools-logging/powertools-logging-log4j/pom.xml
+++ b/powertools-logging/powertools-logging-log4j/pom.xml
@@ -7,7 +7,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0../../pom.xml
diff --git a/powertools-logging/powertools-logging-logback/pom.xml b/powertools-logging/powertools-logging-logback/pom.xml
index 212379439..8b2a5cfd5 100644
--- a/powertools-logging/powertools-logging-logback/pom.xml
+++ b/powertools-logging/powertools-logging-logback/pom.xml
@@ -6,7 +6,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0../../pom.xml
diff --git a/powertools-metrics/pom.xml b/powertools-metrics/pom.xml
index 8ae52d43d..460eb220f 100644
--- a/powertools-metrics/pom.xml
+++ b/powertools-metrics/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0Powertools for AWS Lambda (Java) - Metrics
diff --git a/powertools-parameters/pom.xml b/powertools-parameters/pom.xml
index ad8f14e36..96f6f50b3 100644
--- a/powertools-parameters/pom.xml
+++ b/powertools-parameters/pom.xml
@@ -21,7 +21,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0powertools-parameters
diff --git a/powertools-parameters/powertools-parameters-appconfig/pom.xml b/powertools-parameters/powertools-parameters-appconfig/pom.xml
index 840bb5159..a3822d11b 100644
--- a/powertools-parameters/powertools-parameters-appconfig/pom.xml
+++ b/powertools-parameters/powertools-parameters-appconfig/pom.xml
@@ -7,7 +7,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0../../pom.xml
diff --git a/powertools-parameters/powertools-parameters-dynamodb/pom.xml b/powertools-parameters/powertools-parameters-dynamodb/pom.xml
index 1220b1eb5..9c7030d7c 100644
--- a/powertools-parameters/powertools-parameters-dynamodb/pom.xml
+++ b/powertools-parameters/powertools-parameters-dynamodb/pom.xml
@@ -7,7 +7,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0../../pom.xml
diff --git a/powertools-parameters/powertools-parameters-secrets/pom.xml b/powertools-parameters/powertools-parameters-secrets/pom.xml
index 93bd9a26e..99a308825 100644
--- a/powertools-parameters/powertools-parameters-secrets/pom.xml
+++ b/powertools-parameters/powertools-parameters-secrets/pom.xml
@@ -7,7 +7,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0../../pom.xml
diff --git a/powertools-parameters/powertools-parameters-ssm/pom.xml b/powertools-parameters/powertools-parameters-ssm/pom.xml
index 1c91824e9..46cf939ba 100644
--- a/powertools-parameters/powertools-parameters-ssm/pom.xml
+++ b/powertools-parameters/powertools-parameters-ssm/pom.xml
@@ -7,7 +7,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0../../pom.xml
diff --git a/powertools-parameters/powertools-parameters-tests/pom.xml b/powertools-parameters/powertools-parameters-tests/pom.xml
index 37e266bde..4d2b5d145 100644
--- a/powertools-parameters/powertools-parameters-tests/pom.xml
+++ b/powertools-parameters/powertools-parameters-tests/pom.xml
@@ -6,7 +6,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0-RC1
+ 2.0.0../../pom.xml
diff --git a/powertools-serialization/pom.xml b/powertools-serialization/pom.xml
index ec8acab9e..986a3b1d9 100644
--- a/powertools-serialization/pom.xml
+++ b/powertools-serialization/pom.xml
@@ -21,7 +21,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0powertools-serialization
diff --git a/powertools-tracing/pom.xml b/powertools-tracing/pom.xml
index a869679a8..14d6d51b1 100644
--- a/powertools-tracing/pom.xml
+++ b/powertools-tracing/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0Powertools for AWS Lambda (Java) - Tracing
diff --git a/powertools-validation/pom.xml b/powertools-validation/pom.xml
index 8720509bb..bfedf8d40 100644
--- a/powertools-validation/pom.xml
+++ b/powertools-validation/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0-RC1
+ 2.0.0Powertools for AWS Lambda (Java) - Validation
From 871a844d10d8e54180737c95d1747dd3a045cc45 Mon Sep 17 00:00:00 2001
From: Philipp Page
Date: Thu, 12 Jun 2025 12:24:27 +0200
Subject: [PATCH 2/9] docs: Version documentation (#1878)
* Add versioning plugin for docs and update workflows.
* Add old version banner override. Remove preview wording.
---
.github/workflows/build-docs.yml | 68 ++++++++++++++++++++++----------
.github/workflows/release.yml | 55 +++++++++++++++++++++-----
docs/index.md | 8 ----
docs/overrides/main.html | 8 ++++
mkdocs.yml | 9 +++--
5 files changed, 106 insertions(+), 42 deletions(-)
create mode 100644 docs/overrides/main.html
diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index d4bf75a9d..5d6870171 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -1,7 +1,7 @@
-# Build Docs
+# Build Latest Docs
#
# Description:
-# Builds the docs and stores them in S3 to be served by our docs platform
+# Builds the latest docs and stores them in S3 to be served by our docs platform
#
# The workflow allows us to build to the main location (/lambda/java/) and to an alias
# (i.e. /lambda/java/preview/) if needed
@@ -15,17 +15,13 @@
on:
workflow_dispatch:
inputs:
- alias:
+ version:
+ description: "Version to build and publish docs (1.28.0, develop)"
+ required: true
type: string
- required: false
- description: |
- Alias to deploy the documentation into, this is mostly for testing pre-release
- versions of the documentation, such as beta versions or snapshots.
- https://docs.powertools.aws.dev/lambda/java/
-
-name: Build Docs
-run-name: Build Docs - ${{ contains(github.head_ref, 'main') && 'main' || inputs.alias }}
+name: Build Latest Docs
+run-name: Build Latest Docs - ${{ inputs.version }}
jobs:
docs:
@@ -35,28 +31,58 @@ jobs:
id-token: write
environment: Docs
steps:
- - name: Sanity Check
- if: ${{ github.head_ref != 'main' || inputs.alias == '' }}
- run:
- echo "::error::No buildable docs"
-
- name: Checkout Repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- with:
+ with:
fetch-depth: 0
- name: Build
run: |
mkdir -p dist
docker build -t squidfunk/mkdocs-material ./docs/
docker run --rm -t -v ${PWD}:/docs squidfunk/mkdocs-material build
- cp -R site/* dist/
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722
with:
aws-region: us-east-1
role-to-assume: ${{ secrets.AWS_DOCS_ROLE_ARN }}
- - name: Deploy
+ - name: Deploy Docs (Version)
+ env:
+ VERSION: ${{ inputs.version }}
+ ALIAS: "latest"
run: |
aws s3 sync \
- dist \
- s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/${{ github.head_ref == 'main' && '' || format('{0}/', inputs.alias )}}
\ No newline at end of file
+ site/ \
+ s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/${{ env.VERSION }}/
+ - name: Deploy Docs (Alias)
+ env:
+ VERSION: ${{ inputs.version }}
+ ALIAS: "latest"
+ run: |
+ aws s3 sync \
+ site/ \
+ s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/${{ env.ALIAS }}/
+ - name: Deploy Docs (Version JSON)
+ env:
+ VERSION: ${{ inputs.version }}
+ ALIAS: "latest"
+ # We originally used "mike" from PyPi to manage versions for us, but since we moved to S3, we can't use it to manage versions any more.
+ # Instead, we're using some shell script that manages the versions.
+ #
+ # Operations:
+ # 1. Download the versions.json file from S3
+ # 2. Find any reference to the alias and delete it from the versions file
+ # 3. This is voodoo (don't use JQ):
+ # - we assign the input as $o and the new version/alias as $n,
+ # - we check if the version number exists in the file already (for republishing docs)
+ # - if it's an alias (stage/latest/*) or old version, we do nothing and output $o (original input)
+ # - if it's a new version number, we add it at position 0 in the array.
+ # 4. Once done, we'll upload it back to S3.
+ run: |
+ aws s3 cp \
+ s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/versions.json \
+ versions_old.json
+ jq 'del(.[].aliases[] | select(. == "${{ env.ALIAS }}"))' < versions_old.json > versions_proc.json
+ jq '. as $o | [{"title": "${{ env.VERSION }}", "version": "${{ env.VERSION }}", "aliases": ["${{ env.ALIAS }}"] }] as $n | $n | if .[0].title | test("[a-z]+") or any($o[].title == $n[0].title;.) then [($o | .[] | select(.title == $n[0].title).aliases += $n[0].aliases | . )] else $n + $o end' < versions_proc.json > versions.json
+ aws s3 cp \
+ versions.json \
+ s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/versions.json
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 54924b558..2fa4770c2 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -20,7 +20,7 @@
#
# Triggers:
# - workflow_dispatch
-#
+#
# Secrets:
# - RELEASE.GPG_SIGNING_KEY
# - RELEASE.OSSRH_JIRA_USERNAME
@@ -39,7 +39,7 @@ on:
type: boolean
description: Create snapshot release
default: false
- skip_checks:
+ skip_checks:
type: boolean
description: Skip quality checks
default: false
@@ -47,7 +47,7 @@ on:
type: boolean
description: Skip publish to Maven Central
default: false
- continue_on_error:
+ continue_on_error:
type: boolean
description: Continue to build if there's an error in quality checks
default: false
@@ -55,7 +55,7 @@ on:
name: Release
run-name: Release – ${{ inputs.version }}
-permissions:
+permissions:
contents: read
env:
@@ -124,7 +124,7 @@ jobs:
quality:
runs-on: ubuntu-latest
- needs:
+ needs:
- version_seal
if: ${{ inputs.skip_checks == false }}
permissions:
@@ -211,7 +211,7 @@ jobs:
MAVEN_USERNAME: ${{ secrets.MAVEN_CENTRAL_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.MAVEN_CENTRAL_PASSWORD }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
-
+
create_pr:
runs-on: ubuntu-latest
if: ${{ inputs.snapshot == false && always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
@@ -278,14 +278,49 @@ jobs:
mkdir -p dist
docker build -t squidfunk/mkdocs-material ./docs/
docker run --rm -t -v ${PWD}:/docs squidfunk/mkdocs-material build
- cp -R site/* dist/
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722
with:
aws-region: us-east-1
role-to-assume: ${{ secrets.AWS_DOCS_ROLE_ARN }}
- - name: Deploy
+ - name: Deploy Docs (Version)
+ env:
+ VERSION: ${{ inputs.version }}
+ ALIAS: 'latest'
run: |
aws s3 sync \
- dist \
- s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/${{ inputs.version }}/
+ site/ \
+ s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/${{ env.VERSION }}/
+ - name: Deploy Docs (Alias)
+ env:
+ VERSION: ${{ inputs.version }}
+ ALIAS: 'latest'
+ run: |
+ aws s3 sync \
+ site/ \
+ s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/${{ env.ALIAS }}/
+ - name: Deploy Docs (Version JSON)
+ env:
+ VERSION: ${{ inputs.version }}
+ ALIAS: 'latest'
+ # We originally used "mike" from PyPi to manage versions for us, but since we moved to S3, we can't use it to manage versions any more.
+ # Instead, we're using some shell script that manages the versions.
+ #
+ # Operations:
+ # 1. Download the versions.json file from S3
+ # 2. Find any reference to the alias and delete it from the versions file
+ # 3. This is voodoo (don't use JQ):
+ # - we assign the input as $o and the new version/alias as $n,
+ # - we check if the version number exists in the file already (for republishing docs)
+ # - if it's an alias (stage/latest/*) or old version, we do nothing and output $o (original input)
+ # - if it's a new version number, we add it at position 0 in the array.
+ # 4. Once done, we'll upload it back to S3.
+ run: |
+ aws s3 cp \
+ s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/versions.json \
+ versions_old.json
+ jq 'del(.[].aliases[] | select(. == "${{ env.ALIAS }}"))' < versions_old.json > versions_proc.json
+ jq '. as $o | [{"title": "${{ env.VERSION }}", "version": "${{ env.VERSION }}", "aliases": ["${{ env.ALIAS }}"] }] as $n | $n | if .[0].title | test("[a-z]+") or any($o[].title == $n[0].title;.) then [($o | .[] | select(.title == $n[0].title).aliases += $n[0].aliases | . )] else $n + $o end' < versions_proc.json > versions.json
+ aws s3 cp \
+ versions.json \
+ s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-java/versions.json
diff --git a/docs/index.md b/docs/index.md
index 43d1ea03b..3c6b9506b 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -3,14 +3,6 @@ title: Homepage
description: Powertools for AWS Lambda (Java)
---
-  
-
-???+ warning
- You are browsing the documentation for Powertools for AWS Lambda (Java) - v2. This is a snapshot release and not stable!
- Check out our stable [v1](https://docs.powertools.aws.dev/lambda/java/) documentation if this is not what you wanted.
- The v2 maven snapshot repository can be found [here](https://aws.oss.sonatype.org/content/repositories/snapshots/software/amazon/lambda/) .
-
Powertools for AWS Lambda (Java) is a suite of utilities for AWS Lambda Functions that makes tracing with AWS X-Ray, structured logging and creating custom metrics asynchronously easier.
???+ tip
diff --git a/docs/overrides/main.html b/docs/overrides/main.html
new file mode 100644
index 000000000..e4c38e21b
--- /dev/null
+++ b/docs/overrides/main.html
@@ -0,0 +1,8 @@
+{% extends "base.html" %}
+
+{% block outdated %}
+  You're not viewing the latest version.
+  <a href="{{ '../' ~ base_url }}">
+    <strong>Click here to go to latest.</strong>
+  </a>
+{% endblock %}
diff --git a/mkdocs.yml b/mkdocs.yml
index ee6bd7322..82a32d49c 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,7 +1,7 @@
-site_name: Powertools for AWS Lambda (Java) Preview
-site_description: Powertools for AWS Lambda (Java) Preview
+site_name: Powertools for AWS Lambda (Java)
+site_description: Powertools for AWS Lambda (Java)
site_author: Amazon Web Services
-site_url: https://docs.powertools.aws.dev/lambda/java/preview/
+site_url: https://docs.powertools.aws.dev/lambda/java/
nav:
- Homepage: index.md
- Changelog: changelog.md
@@ -119,6 +119,9 @@ extra_javascript:
extra:
powertools:
version: 2.0.0
+ version:
+ provider: mike
+ default: latest
repo_url: https://github.com/aws-powertools/powertools-lambda-java
edit_uri: edit/main/docs
From 0b275948638450165281a1d7abb5b4d321f3aeec Mon Sep 17 00:00:00 2001
From: Simon Thulbourn
Date: Fri, 13 Jun 2025 11:05:23 +0200
Subject: [PATCH 3/9] chore(ci): Update workflows to make v2 the default
(#1888)
* chore(ci): Make v2 the main branch
* update runner size for release
* update skip checks
* create tag instead of release
---
.github/auto_assign-issues.yml | 9 ---------
.github/workflows/check-build.yml | 1 -
.github/workflows/check-e2e.yml | 1 -
.github/workflows/check-spotbugs.yml | 2 +-
.github/workflows/release-drafter.yml | 5 -----
.github/workflows/release.yml | 17 ++++++++---------
.../workflows/security-branch-protections.yml | 2 +-
.github/workflows/security-dependabot.yml | 2 +-
.github/workflows/security-osv.yml | 2 --
9 files changed, 11 insertions(+), 30 deletions(-)
delete mode 100644 .github/auto_assign-issues.yml
diff --git a/.github/auto_assign-issues.yml b/.github/auto_assign-issues.yml
deleted file mode 100644
index fb160ed94..000000000
--- a/.github/auto_assign-issues.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-addAssignees: true
-
-# The list of users to assign to new issues.
-# If empty or not provided, the repository owner is assigned
-assignees:
- - scottgerring
- - jeromevdl
- - mriccia
- - msailes
diff --git a/.github/workflows/check-build.yml b/.github/workflows/check-build.yml
index cc5931d05..b5fe372dc 100644
--- a/.github/workflows/check-build.yml
+++ b/.github/workflows/check-build.yml
@@ -38,7 +38,6 @@ on:
push:
branches:
- main
- - v2
paths: # add other modules when there are under e2e tests
- 'powertools-batch/**'
- 'powertools-core/**'
diff --git a/.github/workflows/check-e2e.yml b/.github/workflows/check-e2e.yml
index 14eab5394..d5c95f156 100644
--- a/.github/workflows/check-e2e.yml
+++ b/.github/workflows/check-e2e.yml
@@ -15,7 +15,6 @@ on:
push:
branches:
- main
- - v2
paths: # add other modules when there are under e2e tests
- 'powertools-batch/**'
- 'powertools-core/**'
diff --git a/.github/workflows/check-spotbugs.yml b/.github/workflows/check-spotbugs.yml
index 0749dfaa0..fd76d9560 100644
--- a/.github/workflows/check-spotbugs.yml
+++ b/.github/workflows/check-spotbugs.yml
@@ -9,7 +9,7 @@
on:
pull_request:
branches:
- - v2
+ - main
paths:
- 'powertools-batch/**'
- 'powertools-core/**'
diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml
index f727ee25d..39d453ced 100644
--- a/.github/workflows/release-drafter.yml
+++ b/.github/workflows/release-drafter.yml
@@ -15,16 +15,11 @@ name: Release Drafter
run-name: Release Drafter
jobs:
-<<<<<<< HEAD
- update_release_draft:
- runs-on: ubuntu-latest
-=======
update_release:
runs-on: ubuntu-latest
permissions:
contents: write
id-token: write
->>>>>>> 4a17172a (chore(automation): Update automation workflows (#1779))
steps:
- name: Release Drafter
uses: release-drafter/release-drafter@b1476f6e6eb133afa41ed8589daba6dc69b4d3f5
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 2fa4770c2..dc462fbfc 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -82,7 +82,7 @@ jobs:
- id: base
name: Base
run: |
- echo build_version=$(test ${{ github.ref_name }} == "v2" && echo "v2" || echo "v1") >> $GITHUB_OUTPUT
+ echo build_version=$(test ${{ github.ref_name }} == "main" && echo "v2" || echo "v1") >> $GITHUB_OUTPUT
- id: build_matrix_v1
name: Build matrix (v1)
if: ${{ steps.base.outputs.build_version == 'v1' }}
@@ -123,7 +123,7 @@ jobs:
retention-days: 1
quality:
- runs-on: ubuntu-latest
+ runs-on: aws-powertools_ubuntu-latest_8-core
needs:
- version_seal
if: ${{ inputs.skip_checks == false }}
@@ -156,7 +156,7 @@ jobs:
uploadSarifReport: false
build:
- runs-on: ubuntu-latest
+ runs-on: aws-powertools_ubuntu-latest_8-core
needs:
- setup
- quality
@@ -183,8 +183,8 @@ jobs:
mvn -B install --file pom.xml
publish:
- runs-on: ubuntu-latest
- if: ${{ github.repository == 'aws-powertools/powertools-lambda-java' && inputs.skip_publish == false }}
+ runs-on: aws-powertools_ubuntu-latest_8-core
+ if: ${{ github.repository == 'aws-powertools/powertools-lambda-java' && inputs.skip_publish == false && always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
needs:
- build
environment: Release
@@ -254,11 +254,10 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- id: tag
- name: Create release
+ name: Create tag
run: |
- gh release create v${{ inputs.version }} --target $(git rev-parse HEAD)
- env:
- GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ git tag -a v${{ inputs.version }} -m "Release v${{ inputs.version }}"
+ git push origin v${{ inputs.version }}
docs:
runs-on: ubuntu-latest
diff --git a/.github/workflows/security-branch-protections.yml b/.github/workflows/security-branch-protections.yml
index dc7c06316..05a082b0b 100644
--- a/.github/workflows/security-branch-protections.yml
+++ b/.github/workflows/security-branch-protections.yml
@@ -43,7 +43,7 @@ jobs:
# List of branches we want to monitor for protection changes
branch:
- main
- - v2
+ - v1
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
diff --git a/.github/workflows/security-dependabot.yml b/.github/workflows/security-dependabot.yml
index 095219045..e1422fb2b 100644
--- a/.github/workflows/security-dependabot.yml
+++ b/.github/workflows/security-dependabot.yml
@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'dependabot[bot]' && github.repository == 'aws-powertools/powertools-lambda-java' }}
permissions:
- pull-requests: read
+ pull-requests: write
steps:
- id: dependabot-metadata
name: Fetch Dependabot metadata
diff --git a/.github/workflows/security-osv.yml b/.github/workflows/security-osv.yml
index b332faae3..67e2e6e3f 100644
--- a/.github/workflows/security-osv.yml
+++ b/.github/workflows/security-osv.yml
@@ -13,14 +13,12 @@ on:
pull_request:
branches:
- main
- - v2
workflow_dispatch: {}
schedule:
- cron: "30 12 * * 1"
push:
branches:
- main
- - v2
name: OpenSource Vulnerability Scanner
run-name: OpenSource Vulnerability Scanner
From cc1b3b11d0118f35bbc22dd318693e05fec87e71 Mon Sep 17 00:00:00 2001
From: Philipp Page
Date: Mon, 16 Jun 2025 16:59:54 +0200
Subject: [PATCH 4/9] docs: Announce deprecation of v1
---
docs/processes/versioning.md | 7 ++++---
docs/upgrade.md | 5 ++++-
2 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/docs/processes/versioning.md b/docs/processes/versioning.md
index 8b12e0fa9..bbb60f507 100644
--- a/docs/processes/versioning.md
+++ b/docs/processes/versioning.md
@@ -55,6 +55,7 @@ To see the list of available major versions of Powertools for AWS Lambda and whe
### Version support matrix
-| SDK | Major version | Current Phase | General Availability Date | Notes |
-| -------------------------------- | ------------- | -------------------- | ------------------------- | ------------------------------------------------------------------------------------------------- |
-| Powertools for AWS Lambda (Java) | 1.x | General Availability | 11/04/2020 | See [Release notes](https://github.com/aws-powertools/powertools-lambda-java/releases/tag/v1.0.0) |
+| SDK | Major version | Current Phase | General Availability Date | Notes |
+| -------------------------------- | ------------- | -------------------- | ------------------------- | ------------------------------------------------------------------------------------------------------------------- |
+| Powertools for AWS Lambda (Java) | 2.x | General Availability | 06/12/2025 | See [Release notes](https://github.com/aws-powertools/powertools-lambda-java/releases/tag/v2.0.0) |
+| Powertools for AWS Lambda (Java) | 1.x | Maintenance | 11/04/2020 | End-of-support: December 12, 2025. See [upgrade guide](https://docs.powertools.aws.dev/lambda/java/latest/upgrade/) |
diff --git a/docs/upgrade.md b/docs/upgrade.md
index d1388d95b..5b6d16d99 100644
--- a/docs/upgrade.md
+++ b/docs/upgrade.md
@@ -5,7 +5,10 @@ description: Guide to update between major Powertools for AWS Lambda (Java) vers
## End of support v1
-
+
+!!! warning "End of support notice"
+    On December 12th, 2025, Powertools for AWS Lambda (Java) v1 will reach end of support and will no longer receive updates or releases. If you are still using v1, we strongly recommend you read our upgrade guide and update to the latest version.
+
Given our commitment to all of our customers using Powertools for AWS Lambda (Java), we will keep [Maven Central](https://central.sonatype.com/search?q=powertools){target="\_blank"} `v1` releases and a `v1` documentation archive to prevent any disruption.
From 8a040ac87a2a31662b7ac8980601b4d821cc29d8 Mon Sep 17 00:00:00 2001
From: Philipp Page
Date: Mon, 16 Jun 2025 17:00:49 +0200
Subject: [PATCH 5/9] fix(metrics): Do not flush when no metrics were added to
avoid printing root-level _aws dict (#1891)
* fix(metrics): Do not flush when no metrics were added to avoid printing root-level _aws dict.
* Fix pmd linting failures.
---
.../powertools/metrics/internal/EmfMetricsLogger.java | 3 ++-
.../metrics/internal/EmfMetricsLoggerTest.java | 10 ++++++----
2 files changed, 8 insertions(+), 5 deletions(-)
diff --git a/powertools-metrics/src/main/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLogger.java b/powertools-metrics/src/main/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLogger.java
index a55e1da5a..1eedd270d 100644
--- a/powertools-metrics/src/main/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLogger.java
+++ b/powertools-metrics/src/main/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLogger.java
@@ -164,8 +164,9 @@ public void flush() {
} else {
LOGGER.warn("No metrics were emitted");
}
+ } else {
+ emfLogger.flush();
}
- emfLogger.flush();
}
@Override
diff --git a/powertools-metrics/src/test/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLoggerTest.java b/powertools-metrics/src/test/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLoggerTest.java
index 1b7106ece..a4fc0d61c 100644
--- a/powertools-metrics/src/test/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLoggerTest.java
+++ b/powertools-metrics/src/test/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLoggerTest.java
@@ -51,7 +51,7 @@ class EmfMetricsLoggerTest {
private Metrics metrics;
private final ObjectMapper objectMapper = new ObjectMapper();
- private final PrintStream standardOut = System.out;
+ private static final PrintStream standardOut = System.out;
private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();
@BeforeEach
@@ -180,7 +180,7 @@ void shouldAddDimension() throws Exception {
JsonNode dimensions = rootNode.get("_aws").get("CloudWatchMetrics").get(0).get("Dimensions").get(0);
boolean hasDimension = false;
for (JsonNode dimension : dimensions) {
- if (dimension.asText().equals("CustomDimension")) {
+ if ("CustomDimension".equals(dimension.asText())) {
hasDimension = true;
break;
}
@@ -233,9 +233,9 @@ void shouldAddDimensionSet() throws Exception {
boolean hasDim2 = false;
for (JsonNode dimension : dimensions) {
String dimName = dimension.asText();
- if (dimName.equals("Dim1")) {
+ if ("Dim1".equals(dimName)) {
hasDim1 = true;
- } else if (dimName.equals("Dim2")) {
+ } else if ("Dim2".equals(dimName)) {
hasDim2 = true;
}
}
@@ -348,6 +348,8 @@ void shouldLogWarningOnEmptyMetrics() throws Exception {
// Read the log file and check for the warning
String logContent = new String(Files.readAllBytes(logFile.toPath()), StandardCharsets.UTF_8);
assertThat(logContent).contains("No metrics were emitted");
+ // No EMF output should be generated
+ assertThat(outputStreamCaptor.toString().trim()).isEmpty();
}
@Test
From eebc06ae150d4b04cc48bea67785ab019c7258a1 Mon Sep 17 00:00:00 2001
From: Philipp Page
Date: Wed, 18 Jun 2025 19:02:43 +0200
Subject: [PATCH 6/9] feat(kafka): New Kafka utility (#1898)
* Add initial code for KafkaJson and KafkaAvro request handlers.
* Add deserialization via @Deserialization annotation.
* Add TODOs in code.
* Fix typos and make AbstractKafkaDeserializer package private.
* Remove request handler implementation in favor for @Deserialization annotation.
* Parse Timestamp type correctly.
* Remove custom RequestHandler implementation example.
* Make AspectJ version compatible with min version Java 11.
* Clarify exception message when deserialization fails.
* Add more advanced JSON escaping to JSONSerializer in logging module.
* Add protobuf deserialization logic and fully working example.
* Add Maven profile to compile a JAR with different dependency combinations.
* Add minimal kafka example.
* Add missing copyright.
* Add unit tests for kafka utility.
* Add minimal kafka example to examples module in pom.xml.
* Add some comments.
* Update powertools-examples-kafka with README and make it more minimalistic. Remove powertools-examples-kafka-minimal.
* Implement PR feedback from Karthik.
* Fix SAM outputs.
* Do not fail on unknown properties when deserializing into KafkaEvent.
* Allow customers to bring their own kafka-clients dependency.
* Add Kafka utility documentation.
* Update project version consistently to 2.0.0.
* fix: Fix bug where abbreviated _HANDLER env var did not detect the Deserialization annotation.
* fix: Bug when trying to deserialize a type into itself for Lambda default behavior. We can just return the type itself. Relevant for simple String and InputStream handlers.
* When falling back to Lambda default, handle conversion between InputStream and String.
* Raise a runtime exception when the KafkaEvent is invalid.
* docs: Announce deprecation of v1
* fix(metrics): Do not flush when no metrics were added to avoid printing root-level _aws dict (#1891)
* fix(metrics): Do not flush when no metrics were added to avoid printing root-level _aws dict.
* Fix pmd linting failures.
* Rename docs to Kafka Consumer and add line highlights for code examples.
* Fix Spotbug issues.
* Reduce cognitive complexity of DeserializationUtils making it more modular and representing handler information in a simple HandlerInfo class.
* Reduce cognitive complexity of AbstractKafkaDeserializer.
* Enable removal policy DESTROY on e2e test for kinesis streams and SQS queues to avoid exceeding account limit.
* Replace System.out with Powertools Logging.
* Add notice about kafka-clients compatibility.
* Add sentence stating that Avro / Protobuf classes can be autogenerated.
---
docs/utilities/kafka.md | 1001 +++++++++++++++++
examples/pom.xml | 3 +-
examples/powertools-examples-kafka/README.md | 77 ++
.../events/kafka-avro-event.json | 51 +
.../events/kafka-json-event.json | 51 +
.../events/kafka-protobuf-event.json | 51 +
examples/powertools-examples-kafka/pom.xml | 232 ++++
.../src/main/avro/AvroProduct.avsc | 10 +
.../kafka/AvroDeserializationFunction.java | 37 +
.../kafka/JsonDeserializationFunction.java | 35 +
.../src/main/java/org/demo/kafka/Product.java | 63 ++
.../ProtobufDeserializationFunction.java | 38 +
.../java/org/demo/kafka/avro/AvroProduct.java | 476 ++++++++
.../demo/kafka/protobuf/ProtobufProduct.java | 636 +++++++++++
.../protobuf/ProtobufProductOrBuilder.java | 36 +
.../protobuf/ProtobufProductOuterClass.java | 63 ++
.../src/main/proto/ProtobufProduct.proto | 13 +
.../src/main/resources/log4j2.xml | 16 +
.../powertools-examples-kafka/template.yaml | 59 +
.../powertools-examples-kafka/tools/README.md | 66 ++
.../powertools-examples-kafka/tools/pom.xml | 104 ++
.../java/org/demo/kafka/avro/AvroProduct.java | 476 ++++++++
.../demo/kafka/protobuf/ProtobufProduct.java | 636 +++++++++++
.../protobuf/ProtobufProductOrBuilder.java | 36 +
.../protobuf/ProtobufProductOuterClass.java | 63 ++
.../demo/kafka/tools/GenerateAvroSamples.java | 121 ++
.../demo/kafka/tools/GenerateJsonSamples.java | 126 +++
.../kafka/tools/GenerateProtobufSamples.java | 125 ++
mkdocs.yml | 7 +-
pom.xml | 7 +-
powertools-e2e-tests/handlers/batch/pom.xml | 2 +-
.../handlers/idempotency/pom.xml | 2 +-
.../handlers/largemessage/pom.xml | 2 +-
.../handlers/largemessage_idempotent/pom.xml | 2 +-
powertools-e2e-tests/handlers/logging/pom.xml | 2 +-
powertools-e2e-tests/handlers/metrics/pom.xml | 2 +-
.../handlers/parameters/pom.xml | 2 +-
powertools-e2e-tests/handlers/pom.xml | 4 +-
powertools-e2e-tests/handlers/tracing/pom.xml | 2 +-
.../handlers/validation-alb-event/pom.xml | 2 +-
.../handlers/validation-apigw-event/pom.xml | 2 +-
.../powertools/testutils/Infrastructure.java | 2 +
powertools-kafka/pom.xml | 223 ++++
.../powertools/kafka/Deserialization.java | 31 +
.../powertools/kafka/DeserializationType.java | 17 +
.../kafka/PowertoolsSerializer.java | 67 ++
.../kafka/internal/DeserializationUtils.java | 96 ++
.../AbstractKafkaDeserializer.java | 294 +++++
.../serializers/KafkaAvroDeserializer.java | 46 +
.../serializers/KafkaJsonDeserializer.java | 29 +
.../KafkaProtobufDeserializer.java | 43 +
.../LambdaDefaultDeserializer.java | 65 ++
.../serializers/PowertoolsDeserializer.java | 27 +
...rvices.lambda.runtime.CustomPojoSerializer | 1 +
.../src/test/avro/TestProduct.avsc | 10 +
.../powertools/kafka/DeserializationTest.java | 71 ++
.../kafka/DeserializationTypeTest.java | 50 +
.../kafka/PowertoolsSerializerTest.java | 417 +++++++
.../internal/DeserializationUtilsTest.java | 145 +++
.../AbstractKafkaDeserializerTest.java | 473 ++++++++
.../KafkaAvroDeserializerTest.java | 73 ++
.../KafkaJsonDeserializerTest.java | 66 ++
.../KafkaProtobufDeserializerTest.java | 75 ++
.../kafka/testutils/AvroHandler.java | 30 +
.../kafka/testutils/DefaultHandler.java | 29 +
.../kafka/testutils/InputStreamHandler.java | 30 +
.../kafka/testutils/JsonHandler.java | 29 +
.../kafka/testutils/ProtobufHandler.java | 30 +
.../kafka/testutils/StringHandler.java | 23 +
.../kafka/testutils/TestProductPojo.java | 87 ++
.../powertools/kafka/testutils/TestUtils.java | 75 ++
.../src/test/proto/TestProduct.proto | 13 +
.../test/resources/simplelogger.properties | 13 +
.../internal/LambdaEcsEncoderTest.java | 2 +-
.../internal/LambdaJsonEncoderTest.java | 2 +-
.../logging/internal/JsonSerializer.java | 11 +-
76 files changed, 7413 insertions(+), 21 deletions(-)
create mode 100644 docs/utilities/kafka.md
create mode 100644 examples/powertools-examples-kafka/README.md
create mode 100644 examples/powertools-examples-kafka/events/kafka-avro-event.json
create mode 100644 examples/powertools-examples-kafka/events/kafka-json-event.json
create mode 100644 examples/powertools-examples-kafka/events/kafka-protobuf-event.json
create mode 100644 examples/powertools-examples-kafka/pom.xml
create mode 100644 examples/powertools-examples-kafka/src/main/avro/AvroProduct.avsc
create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/AvroDeserializationFunction.java
create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/JsonDeserializationFunction.java
create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/Product.java
create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/ProtobufDeserializationFunction.java
create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/avro/AvroProduct.java
create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java
create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java
create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java
create mode 100644 examples/powertools-examples-kafka/src/main/proto/ProtobufProduct.proto
create mode 100644 examples/powertools-examples-kafka/src/main/resources/log4j2.xml
create mode 100644 examples/powertools-examples-kafka/template.yaml
create mode 100644 examples/powertools-examples-kafka/tools/README.md
create mode 100644 examples/powertools-examples-kafka/tools/pom.xml
create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java
create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java
create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java
create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java
create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java
create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java
create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java
create mode 100644 powertools-kafka/pom.xml
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java
create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java
create mode 100644 powertools-kafka/src/main/resources/META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer
create mode 100644 powertools-kafka/src/test/avro/TestProduct.avsc
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTest.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTypeTest.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/AvroHandler.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/DefaultHandler.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/InputStreamHandler.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/JsonHandler.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/ProtobufHandler.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/StringHandler.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestProductPojo.java
create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java
create mode 100644 powertools-kafka/src/test/proto/TestProduct.proto
create mode 100644 powertools-kafka/src/test/resources/simplelogger.properties
diff --git a/docs/utilities/kafka.md b/docs/utilities/kafka.md
new file mode 100644
index 000000000..da179bc5c
--- /dev/null
+++ b/docs/utilities/kafka.md
@@ -0,0 +1,1001 @@
+---
+title: Kafka Consumer
+description: Utility
+status: new
+---
+
+
+
+The Kafka utility transparently handles message deserialization, provides an intuitive developer experience, and integrates seamlessly with the rest of the Powertools for AWS Lambda ecosystem.
+
+```mermaid
+flowchart LR
+ KafkaTopic["Kafka Topic"] --> MSK["Amazon MSK"]
+ KafkaTopic --> MSKServerless["Amazon MSK Serverless"]
+ KafkaTopic --> SelfHosted["Self-hosted Kafka"]
+ MSK --> EventSourceMapping["Event Source Mapping"]
+ MSKServerless --> EventSourceMapping
+ SelfHosted --> EventSourceMapping
+ EventSourceMapping --> Lambda["Lambda Function"]
+ Lambda --> KafkaUtility["Kafka Utility"]
+ KafkaUtility --> Deserialization["Deserialization"]
+ Deserialization --> YourLogic["Your Business Logic"]
+```
+
+## Key features
+
+- Automatic deserialization of Kafka messages (JSON, Avro, and Protocol Buffers)
+- Simplified event record handling with familiar Kafka `ConsumerRecords` interface
+- Support for key and value deserialization
+- Support for ESM with and without Schema Registry integration
+- Proper error handling for deserialization issues
+
+## Terminology
+
+**Event Source Mapping (ESM)** A Lambda feature that reads from streaming sources (like Kafka) and invokes your Lambda function. It manages polling, batching, and error handling automatically, eliminating the need for consumer management code.
+
+**Record Key and Value** Kafka messages contain two important parts: an optional key that determines the partition and a value containing the actual message data. Both are base64-encoded in Lambda events and can be independently deserialized.
+
+**Deserialization** The process of converting binary data (base64-encoded in Lambda events) into usable Java objects according to a specific format like JSON, Avro, or Protocol Buffers. Powertools handles this conversion automatically.
+
+**DeserializationType enum** Contains parameters that tell Powertools how to interpret message data, including the format type (JSON, Avro, Protocol Buffers).
+
+**Schema Registry** A centralized service that stores and validates schemas, ensuring producers and consumers maintain compatibility when message formats evolve over time.
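+
+To make the base64 encoding concrete, the sketch below shows roughly what a single record inside the raw Lambda Kafka event looks like before Powertools processes it. It is an abbreviated, made-up example: the `key` decodes to `user-123`, the `value` decodes to the JSON `{"id":1}`, and Powertools decodes and deserializes both for you.
+
+```json
+{
+  "eventSource": "aws:kafka",
+  "records": {
+    "my-topic-0": [
+      {
+        "topic": "my-topic",
+        "partition": 0,
+        "offset": 15,
+        "timestamp": 1718000000000,
+        "timestampType": "CREATE_TIME",
+        "key": "dXNlci0xMjM=",
+        "value": "eyJpZCI6MX0=",
+        "headers": []
+      }
+    ]
+  }
+}
+```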
+
+## Moving from traditional Kafka consumers
+
+Lambda processes Kafka messages as discrete events rather than continuous streams, requiring a different approach to consumer development that Powertools for AWS helps standardize.
+
+| Aspect | Traditional Kafka Consumers | Lambda Kafka Consumer |
+| --------------------- | ----------------------------------- | -------------------------------------------------------------- |
+| **Model** | Pull-based (you poll for messages) | Push-based (Lambda invoked with messages) |
+| **Scaling** | Manual scaling configuration | Automatic scaling to partition count |
+| **State** | Long-running application with state | Stateless, ephemeral executions |
+| **Offsets** | Manual offset management | Automatic offset commitment |
+| **Schema Validation** | Client-side schema validation | Optional Schema Registry integration with Event Source Mapping |
+| **Error Handling** | Per-message retry control | Batch-level retry policies |
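+
+For comparison, a minimal traditional consumer written directly against `kafka-clients` might look like the sketch below. This is purely illustrative and not part of this utility (broker address, topic, and group id are placeholders); with Lambda and the Kafka utility, the polling loop, offset commits, and deserializer wiring disappear from your handler code.
+
+```java
+import java.time.Duration;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+
+public class TraditionalConsumer {
+    public static void main(String[] args) {
+        Properties props = new Properties();
+        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "broker:9092"); // placeholder broker
+        props.put(ConsumerConfig.GROUP_ID_CONFIG, "my-consumer-group");
+        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
+
+        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
+            consumer.subscribe(List.of("my-topic"));
+            // Long-running poll loop that you host, scale, and monitor yourself
+            while (true) {
+                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
+                for (ConsumerRecord<String, String> record : records) {
+                    System.out.println("Processing value: " + record.value()); // your business logic
+                }
+                consumer.commitSync(); // manual offset management
+            }
+        }
+    }
+}
+```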
+
+## Getting started
+
+### Installation
+
+Add the Powertools for AWS Lambda Kafka dependency to your project. Make sure to also add the `kafka-clients` library as a dependency. The utility supports `kafka-clients >= 3.0.0`.
+
+=== "Maven"
+
+ ```xml
+ <dependency>
+     <groupId>software.amazon.lambda</groupId>
+     <artifactId>powertools-kafka</artifactId>
+     <version>{{ powertools.version }}</version>
+ </dependency>
+ 
+ <!-- Kafka clients dependency - compatibility works for >= 3.0.0 -->
+ <dependency>
+     <groupId>org.apache.kafka</groupId>
+     <artifactId>kafka-clients</artifactId>
+     <version>4.0.0</version>
+ </dependency>
+ ```
+
+=== "Gradle"
+
+ ```gradle
+ dependencies {
+ implementation 'software.amazon.lambda:powertools-kafka:{{ powertools.version }}'
+ // Kafka clients dependency - compatibility works for >= 3.0.0
+ implementation 'org.apache.kafka:kafka-clients:4.0.0'
+ }
+ ```
+
+### Required resources
+
+To use the Kafka utility, you need an AWS Lambda function configured with a Kafka event source. This can be Amazon MSK, MSK Serverless, or a self-hosted Kafka cluster.
+
+=== "getting_started_with_msk.yaml"
+
+ ```yaml
+ AWSTemplateFormatVersion: '2010-09-09'
+ Transform: AWS::Serverless-2016-10-31
+ Resources:
+ KafkaConsumerFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: org.example.KafkaHandler::handleRequest
+ Runtime: java21
+ Timeout: 30
+ Events:
+ MSKEvent:
+ Type: MSK
+ Properties:
+ StartingPosition: LATEST
+ Stream: !GetAtt MyMSKCluster.Arn
+ Topics:
+ - my-topic-1
+ - my-topic-2
+ Policies:
+ - AWSLambdaMSKExecutionRole
+ ```
+
+### Using ESM with Schema Registry
+
+The Event Source Mapping configuration determines which mode is used. With `JSON`, Lambda converts all messages to JSON before invoking your function. With `SOURCE` mode, Lambda preserves the original format, requiring your function to handle the appropriate deserialization.
+
+Powertools for AWS supports both Schema Registry integration modes in your Event Source Mapping configuration.
+
+### Processing Kafka events
+
+The Kafka utility transforms raw Lambda Kafka events into an intuitive format for processing. To handle messages effectively, annotate your handler with `@Deserialization`, choosing the type that matches your data format. Based on the deserializer you choose, incoming records are transformed directly into your business objects, which can be classes auto-generated from Avro / Protobuf schemas or simple POJOs.
+
+
+???+ tip "Using Avro is recommended"
+ We recommend Avro for production Kafka implementations due to its schema evolution capabilities, compact binary format, and integration with Schema Registry. This offers better type safety and forward/backward compatibility compared to JSON.
+
+=== "Avro Messages"
+
+ ```java hl_lines="18 21"
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class AvroKafkaHandler implements RequestHandler<ConsumerRecords<String, User>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(AvroKafkaHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_AVRO)
+ public String handleRequest(ConsumerRecords<String, User> records, Context context) {
+ for (ConsumerRecord<String, User> record : records) {
+ User user = record.value(); // User class is auto-generated from Avro schema
+ LOGGER.info("Processing user: {}, age {}", user.getName(), user.getAge());
+ }
+ return "OK";
+ }
+ }
+ ```
+
+=== "Protocol Buffers"
+
+ ```java hl_lines="18 21"
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class ProtobufKafkaHandler implements RequestHandler<ConsumerRecords<String, UserProto.User>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ProtobufKafkaHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_PROTOBUF)
+ public String handleRequest(ConsumerRecords<String, UserProto.User> records, Context context) {
+ for (ConsumerRecord<String, UserProto.User> record : records) {
+ UserProto.User user = record.value(); // UserProto.User class is auto-generated from Protocol Buffer schema
+ LOGGER.info("Processing user: {}, age {}", user.getName(), user.getAge());
+ }
+ return "OK";
+ }
+ }
+ ```
+
+=== "JSON Messages"
+
+ ```java hl_lines="18 21"
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class JsonKafkaHandler implements RequestHandler<ConsumerRecords<String, User>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(JsonKafkaHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<String, User> records, Context context) {
+ for (ConsumerRecord<String, User> record : records) {
+ User user = record.value(); // Deserialized JSON object into User POJO
+ LOGGER.info("Processing user: {}, age {}", user.getName(), user.getAge());
+ }
+ return "OK";
+ }
+ }
+ ```
+
+
+???+ tip "Full examples on GitHub"
+ A full example including how to generate Avro and Protobuf Java classes can be found on GitHub at [https://github.com/aws-powertools/powertools-lambda-java/tree/main/examples/powertools-examples-kafka](https://github.com/aws-powertools/powertools-lambda-java/tree/main/examples/powertools-examples-kafka).
+
+### Deserializing keys and values
+
+The `@Deserialization` annotation deserializes both keys and values based on your type configuration. This flexibility allows you to work with different data formats in the same message.
+
+=== "Key and Value Deserialization"
+
+ ```java hl_lines="22"
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class KeyValueKafkaHandler implements RequestHandler<ConsumerRecords<ProductKey, ProductInfo>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(KeyValueKafkaHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_AVRO)
+ public String handleRequest(ConsumerRecords<ProductKey, ProductInfo> records, Context context) {
+ for (ConsumerRecord<ProductKey, ProductInfo> record : records) {
+ // Access both deserialized components
+ ProductKey key = record.key(); // ProductKey class is auto-generated from Avro schema
+ ProductInfo product = record.value(); // ProductInfo class is auto-generated from Avro schema
+
+ LOGGER.info("Processing product ID: {}", key.getProductId());
+ LOGGER.info("Product: {} - ${}", product.getName(), product.getPrice());
+ }
+ return "OK";
+ }
+ }
+ ```
+
+=== "Value-Only Deserialization"
+
+ ```java hl_lines="22"
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class ValueOnlyKafkaHandler implements RequestHandler<ConsumerRecords<String, Order>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ValueOnlyKafkaHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<String, Order> records, Context context) {
+ for (ConsumerRecord<String, Order> record : records) {
+ // Key remains as string (if present)
+ String key = record.key();
+ if (key != null) {
+ LOGGER.info("Message key: {}", key);
+ }
+
+ // Value is deserialized as JSON
+ Order order = record.value();
+ LOGGER.info("Order #{} - Total: ${}", order.getOrderId(), order.getTotal());
+ }
+ return "OK";
+ }
+ }
+ ```
+
+### Handling primitive types
+
+When working with primitive data types (strings, integers, etc.) rather than structured objects, you can use any deserialization type such as `KAFKA_JSON`. Simply place the primitive type like `Integer` or `String` in the `ConsumerRecords` generic type parameters, and the library will automatically handle primitive type deserialization.
+
+
+???+ tip "Common pattern: Keys with primitive values"
+ Using primitive types (strings, integers) as Kafka message keys is a common pattern for partitioning and identifying messages. Powertools automatically handles these primitive keys without requiring special configuration, making it easy to implement this popular design pattern.
+
+=== "Primitive key"
+
+ ```java hl_lines="18 22"
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class PrimitiveKeyHandler implements RequestHandler<ConsumerRecords<Integer, Customer>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(PrimitiveKeyHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<Integer, Customer> records, Context context) {
+ for (ConsumerRecord<Integer, Customer> record : records) {
+ // Key is automatically deserialized as Integer
+ Integer key = record.key();
+
+ // Value is deserialized as JSON
+ Customer customer = record.value();
+
+ LOGGER.info("Key: {}", key);
+ LOGGER.info("Name: {}", customer.getName());
+ LOGGER.info("Email: {}", customer.getEmail());
+ }
+ return "OK";
+ }
+ }
+ ```
+
+=== "Primitive key and value"
+
+ ```java hl_lines="18 22"
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class PrimitiveHandler implements RequestHandler<ConsumerRecords<String, String>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(PrimitiveHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<String, String> records, Context context) {
+ for (ConsumerRecord<String, String> record : records) {
+ // Key is automatically deserialized as String
+ String key = record.key();
+
+ // Value is automatically deserialized as String
+ String value = record.value();
+
+ LOGGER.info("Key: {}", key);
+ LOGGER.info("Value: {}", value);
+ }
+ return "OK";
+ }
+ }
+ ```
+
+### Message format support and comparison
+
+The Kafka utility supports multiple serialization formats to match your existing Kafka implementation. Choose the format that best suits your needs based on performance, schema evolution requirements, and ecosystem compatibility.
+
+
+???+ tip "Selecting the right format"
+ For new applications, consider Avro or Protocol Buffers over JSON. Both provide schema validation, evolution support, and significantly better performance with smaller message sizes. Avro is particularly well-suited for Kafka due to its built-in schema evolution capabilities.
+
+=== "Supported Formats"
+
+ | Format | DeserializationType | Description | Required Dependencies |
+ |--------|---------------------|-------------|----------------------|
+ | **JSON** | `KAFKA_JSON` | Human-readable text format | Jackson |
+ | **Avro** | `KAFKA_AVRO` | Compact binary format with schema | Apache Avro |
+ | **Protocol Buffers** | `KAFKA_PROTOBUF` | Efficient binary format | Protocol Buffers |
+ | **Lambda Default** | `LAMBDA_DEFAULT` | Uses Lambda's built-in deserialization (equivalent to removing the `@Deserialization` annotation) | None |
+
+=== "Format Comparison"
+
+ | Feature | JSON | Avro | Protocol Buffers |
+ |---------|------|------|-----------------|
+ | **Schema Definition** | Optional | Required schema file | Required .proto file |
+ | **Schema Evolution** | None | Strong support | Strong support |
+ | **Size Efficiency** | Low | High | Highest |
+ | **Processing Speed** | Slower | Fast | Fastest |
+ | **Human Readability** | High | Low | Low |
+ | **Implementation Complexity** | Low | Medium | Medium |
+ | **Additional Dependencies** | None | Apache Avro | Protocol Buffers |
+
+Choose the serialization format that best fits your needs:
+
+- **JSON**: Best for simplicity and when schema flexibility is important
+- **Avro**: Best for systems with evolving schemas and when compatibility is critical
+- **Protocol Buffers**: Best for performance-critical systems with structured data
+- **Lambda Default**: Best for simple string-based messages or when using Lambda's built-in deserialization
+
+## Advanced
+
+### Accessing record metadata
+
+Each Kafka record contains important metadata that you can access alongside the deserialized message content. This metadata helps with message processing, troubleshooting, and implementing advanced patterns like exactly-once processing.
+
+=== "Working with Record Metadata"
+
+ ```java
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.apache.kafka.common.header.Header;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class MetadataKafkaHandler implements RequestHandler<ConsumerRecords<String, Customer>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(MetadataKafkaHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_AVRO)
+ public String handleRequest(ConsumerRecords<String, Customer> records, Context context) {
+ for (ConsumerRecord<String, Customer> record : records) {
+ // Log record coordinates for tracing
+ LOGGER.info("Processing message from topic '{}'", record.topic());
+ LOGGER.info(" Partition: {}, Offset: {}", record.partition(), record.offset());
+ LOGGER.info(" Produced at: {}", record.timestamp());
+
+ // Process message headers
+ if (record.headers() != null) {
+ for (Header header : record.headers()) {
+ LOGGER.info(" Header: {} = {}",
+ header.key(), new String(header.value()));
+ }
+ }
+
+ // Access the Avro deserialized message content
+ Customer customer = record.value(); // Customer class is auto-generated from Avro schema
+ LOGGER.info("Processing order for: {}", customer.getName());
+ LOGGER.info("Order total: ${}", customer.getOrderTotal());
+ }
+ return "OK";
+ }
+ }
+ ```
+
+#### Available metadata properties
+
+| Property | Description | Example Use Case |
+| ----------------- | ----------------------------------------------- | ------------------------------------------- |
+| `topic()` | Topic name the record was published to | Routing logic in multi-topic consumers |
+| `partition()` | Kafka partition number | Tracking message distribution |
+| `offset()` | Position in the partition | De-duplication, exactly-once processing |
+| `timestamp()` | Unix timestamp when record was created | Event timing analysis |
+| `timestampType()` | Timestamp type (CREATE_TIME or LOG_APPEND_TIME) | Data lineage verification |
+| `headers()` | Key-value pairs attached to the message | Cross-cutting concerns like correlation IDs |
+| `key()` | Deserialized message key | Customer ID or entity identifier |
+| `value()` | Deserialized message content | The actual business data |
+
+### Error handling
+
+Handle errors gracefully when processing Kafka messages to ensure your application maintains resilience and provides clear diagnostic information. The Kafka utility integrates with standard Java exception handling patterns.
+
+
+!!! info "Treating Deserialization errors"
+ See [Deserialization failures](#deserialization-failures): a deserialization failure fails the whole batch, and your handler is never executed.
+
+=== "Error Handling"
+
+ ```java
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.metrics.FlushMetrics;
+ import software.amazon.lambda.powertools.metrics.Metrics;
+ import software.amazon.lambda.powertools.metrics.MetricsFactory;
+ import software.amazon.lambda.powertools.metrics.model.MetricUnit;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class ErrorHandlingKafkaHandler implements RequestHandler<ConsumerRecords<String, Order>, String> {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ErrorHandlingKafkaHandler.class);
+ private static final Metrics metrics = MetricsFactory.getMetricsInstance();
+
+ @Override
+ @Logging
+ @FlushMetrics(namespace = "KafkaProcessing", service = "order-processing")
+ @Deserialization(type = DeserializationType.KAFKA_AVRO)
+ public String handleRequest(ConsumerRecords<String, Order> records, Context context) {
+ metrics.addMetric("TotalRecords", records.count(), MetricUnit.COUNT);
+ int successfulRecords = 0;
+ int failedRecords = 0;
+
+ for (ConsumerRecord<String, Order> record : records) {
+ try {
+ Order order = record.value(); // Order class is auto-generated from Avro schema
+ processOrder(order);
+ successfulRecords++;
+ metrics.addMetric("ProcessedRecords", 1, MetricUnit.COUNT);
+ } catch (Exception e) {
+ failedRecords++;
+ LOGGER.error("Error processing Kafka message from topic: {}, partition: {}, offset: {}",
+ record.topic(), record.partition(), record.offset(), e);
+ metrics.addMetric("ProcessingErrors", 1, MetricUnit.COUNT);
+ // Optionally send to DLQ or error topic
+ sendToDlq(record);
+ }
+ }
+
+ return String.format("Processed %d records successfully, %d failed",
+ successfulRecords, failedRecords);
+ }
+
+ private void processOrder(Order order) {
+ // Your business logic here
+ LOGGER.info("Processing order: {}", order.getOrderId());
+ }
+
+ private void sendToDlq(ConsumerRecord<String, Order> record) {
+ // Implementation to send failed records to dead letter queue
+ }
+ }
+ ```
+
+### Integrating with Idempotency
+
+When processing Kafka messages in Lambda, failed batches can result in message reprocessing. The idempotency utility prevents duplicate processing by tracking which messages have already been handled, ensuring each message is processed exactly once.
+
+The Idempotency utility automatically stores the result of each successful operation, returning the cached result if the same message is processed again, which prevents potentially harmful duplicate operations like double-charging customers or double-counting metrics.
+
+=== "Idempotent Kafka Processing"
+
+ ```java
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.idempotency.Idempotency;
+ import software.amazon.lambda.powertools.idempotency.IdempotencyConfig;
+ import software.amazon.lambda.powertools.idempotency.Idempotent;
+ import software.amazon.lambda.powertools.idempotency.persistence.dynamodb.DynamoDBPersistenceStore;
+ import software.amazon.lambda.powertools.logging.Logging;
+
+ public class IdempotentKafkaHandler implements RequestHandler<ConsumerRecords<String, Payment>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(IdempotentKafkaHandler.class);
+
+ public IdempotentKafkaHandler() {
+ // Configure idempotency with DynamoDB persistence store
+ Idempotency.config()
+ .withPersistenceStore(
+ DynamoDBPersistenceStore.builder()
+ .withTableName("IdempotencyTable")
+ .build())
+ .configure();
+ }
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<String, Payment> records, Context context) {
+ for (ConsumerRecord<String, Payment> record : records) {
+ // Payment class deserialized from JSON
+ Payment payment = record.value();
+
+ // Process each message with idempotency protection
+ processPayment(payment);
+ }
+ return "OK";
+ }
+
+ @Idempotent
+ private void processPayment(Payment payment) {
+ LOGGER.info("Processing payment {}", payment.getPaymentId());
+
+ // Your business logic here
+ PaymentService.process(payment.getPaymentId(), payment.getCustomerId(), payment.getAmount());
+ }
+ }
+ ```
+
+
+???+ tip "Ensuring exactly-once processing"
+ The `@Idempotent` annotation will use the JSON representation of the Payment object to make sure that the same object is only processed exactly once. Even if a batch fails and Lambda retries the messages, each unique payment will be processed exactly once.
+
+### Best practices
+
+#### Batch size configuration
+
+The number of Kafka records processed per Lambda invocation is controlled by your Event Source Mapping configuration. Properly sized batches optimize cost and performance.
+
+=== "Batch size configuration"
+
+ ```yaml
+ Resources:
+ OrderProcessingFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: org.example.OrderHandler::handleRequest
+ Runtime: java21
+ Events:
+ KafkaEvent:
+ Type: MSK
+ Properties:
+ Stream: !GetAtt OrdersMSKCluster.Arn
+ Topics:
+ - order-events
+ - payment-events
+ # Configuration for optimal throughput/latency balance
+ BatchSize: 100
+ MaximumBatchingWindowInSeconds: 5
+ StartingPosition: LATEST
+ # Enable partial batch success reporting
+ FunctionResponseTypes:
+ - ReportBatchItemFailures
+ ```
+
+Different workloads benefit from different batch configurations:
+
+- **High-volume, simple processing**: Use larger batches (100-500 records) with short timeout
+- **Complex processing with database operations**: Use smaller batches (10-50 records)
+- **Mixed message sizes**: Set appropriate batching window (1-5 seconds) to handle variability
+
+#### Cross-language compatibility
+
+When using binary serialization formats across multiple programming languages, ensure consistent schema handling to prevent deserialization failures.
+
+=== "Using Python naming convention"
+
+ ```java hl_lines="33 36 39 42 56"
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.RequestHandler;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import software.amazon.lambda.powertools.kafka.Deserialization;
+ import software.amazon.lambda.powertools.kafka.DeserializationType;
+ import software.amazon.lambda.powertools.logging.Logging;
+ import com.fasterxml.jackson.annotation.JsonProperty;
+ import java.time.Instant;
+
+ public class CrossLanguageKafkaHandler implements RequestHandler<ConsumerRecords<String, OrderEvent>, String> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(CrossLanguageKafkaHandler.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<String, OrderEvent> records, Context context) {
+ for (ConsumerRecord<String, OrderEvent> record : records) {
+ OrderEvent order = record.value(); // OrderEvent class handles JSON with Python field names
+ LOGGER.info("Processing order {} from {}",
+ order.getOrderId(), order.getOrderDate());
+ }
+ return "OK";
+ }
+ }
+
+ // Example class that handles Python snake_case field names
+ class OrderEvent {
+ @JsonProperty("order_id")
+ private String orderId;
+
+ @JsonProperty("customer_id")
+ private String customerId;
+
+ @JsonProperty("total_amount")
+ private double totalAmount;
+
+ @JsonProperty("order_date")
+ private long orderDateMillis;
+
+ // Getters and setters
+ public String getOrderId() { return orderId; }
+ public void setOrderId(String orderId) { this.orderId = orderId; }
+
+ public String getCustomerId() { return customerId; }
+ public void setCustomerId(String customerId) { this.customerId = customerId; }
+
+ public double getTotalAmount() { return totalAmount; }
+ public void setTotalAmount(double totalAmount) { this.totalAmount = totalAmount; }
+
+ public Instant getOrderDate() {
+ return Instant.ofEpochMilli(orderDateMillis);
+ }
+ public void setOrderDate(long orderDateMillis) {
+ this.orderDateMillis = orderDateMillis;
+ }
+ }
+ ```
+
+Common cross-language challenges to address:
+
+- **Field naming conventions**: camelCase in Java vs snake_case in Python
+- **Date/time representation**: differences in how timestamps and dates are encoded (epoch millis, ISO-8601 strings, etc.)
+- **Numeric precision**: differences in how decimals are handled across languages
+
+### Troubleshooting
+
+#### Deserialization failures
+
+The Java Kafka utility registers a [custom Lambda serializer](https://docs.aws.amazon.com/lambda/latest/dg/java-custom-serialization.html) that performs **eager deserialization** of all records in the batch before your handler method is invoked.
+
+This means that if any record in the batch fails deserialization, a `RuntimeException` will be thrown with a concrete error message explaining why deserialization failed, and your handler method will never be called.
+
+**Key implications:**
+
+- **Batch-level failure**: If one record fails deserialization, the entire batch fails
+- **Early failure detection**: Deserialization errors are caught before your business logic runs
+- **Clear error messages**: The `RuntimeException` provides specific details about what went wrong
+- **No partial processing**: You cannot process some records while skipping failed ones within the same batch
+
+**Example of deserialization failure:**
+
+```java
+// If any record in the batch has invalid Avro data, you'll see:
+// RuntimeException: Failed to deserialize Kafka record: Invalid Avro schema for record at offset 12345
+```
+
+
+!!! warning "Handler method not invoked on deserialization failure"
+ When deserialization fails, your `handleRequest` method will not be invoked at all. The `RuntimeException` is thrown before your handler code runs, preventing any processing of the batch.
+
+**Handling deserialization failures:**
+
+Since deserialization happens before your handler is called, you cannot catch these exceptions within your handler method. Instead, configure your Event Source Mapping with appropriate error handling:
+
+- **Dead Letter Queue (DLQ)**: Configure a DLQ to capture failed batches for later analysis
+- **Maximum Retry Attempts**: Set appropriate retry limits to avoid infinite retries
+- **Batch Size**: Use smaller batch sizes to minimize the impact of individual record failures
+
+```yaml
+# Example SAM template configuration for error handling
+Events:
+ KafkaEvent:
+ Type: MSK
+ Properties:
+ # ... other properties
+ BatchSize: 10 # Smaller batches reduce failure impact
+ MaximumRetryAttempts: 3
+ DestinationConfig:
+ OnFailure:
+ Type: SQS
+ Destination: !GetAtt DeadLetterQueue.Arn
+```
+
+#### Schema compatibility issues
+
+Schema compatibility issues often manifest as successful connections but failed deserialization. Common causes include:
+
+- **Schema evolution without backward compatibility**: New producer schema is incompatible with consumer schema
+- **Field type mismatches**: For example, a field changed from String to Integer across systems
+- **Missing required fields**: Fields required by the consumer schema but absent in the message
+- **Default value discrepancies**: Different handling of default values between languages
+
+When using Schema Registry, verify schema compatibility rules are properly configured for your topics and that all applications use the same registry.
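+
+As a minimal illustration (the schema and field names below are hypothetical, not taken from the example project), a backward-compatible Avro change adds a new field with a default value so that records written with the old schema can still be read with the new one:
+
+```json
+{
+  "namespace": "org.example",
+  "type": "record",
+  "name": "Order",
+  "fields": [
+    {"name": "orderId", "type": "string"},
+    {"name": "total", "type": "double"},
+    {"name": "currency", "type": "string", "default": "USD"}
+  ]
+}
+```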
+
+#### Memory and timeout optimization
+
+Lambda functions processing Kafka messages may encounter resource constraints, particularly with large batches or complex processing logic.
+
+For memory errors:
+
+- Increase Lambda memory allocation, which also provides more CPU resources
+- Process fewer records per batch by adjusting the `BatchSize` parameter in your event source mapping
+- Consider optimizing your message format to reduce memory footprint
+
+For timeout issues:
+
+- Extend your Lambda function timeout setting to accommodate processing time
+- Implement chunked or asynchronous processing patterns for time-consuming operations
+- Monitor and optimize database operations, external API calls, or other I/O operations in your handler
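+
+Both limits are configured on the function resource itself. A minimal SAM sketch follows (the values are illustrative and should be tuned to your workload):
+
+```yaml
+Resources:
+  KafkaConsumerFunction:
+    Type: AWS::Serverless::Function
+    Properties:
+      Handler: org.example.KafkaHandler::handleRequest
+      Runtime: java21
+      MemorySize: 1024   # more memory also grants proportionally more CPU
+      Timeout: 60        # leave headroom for the largest expected batch
+```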
+
+
+???+ tip "Monitoring memory usage"
+ Use CloudWatch metrics to track your function's memory utilization. If it consistently exceeds 80% of allocated memory, consider increasing the memory allocation or optimizing your code.
+
+## Kafka workflow
+
+### Using ESM with Schema Registry validation (SOURCE)
+
+
+```mermaid
+sequenceDiagram
+ participant Kafka
+ participant ESM as Event Source Mapping
+ participant SchemaRegistry as Schema Registry
+ participant Lambda
+ participant KafkaUtility
+ participant YourCode
+ Kafka->>+ESM: Send batch of records
+ ESM->>+SchemaRegistry: Validate schema
+ SchemaRegistry-->>-ESM: Confirm schema is valid
+ ESM->>+Lambda: Invoke with validated records (still encoded)
+ Lambda->>+KafkaUtility: Pass Kafka event
+ KafkaUtility->>KafkaUtility: Parse event structure
+ loop For each record
+ KafkaUtility->>KafkaUtility: Decode base64 data
+ KafkaUtility->>KafkaUtility: Deserialize based on DeserializationType
+ end
+ KafkaUtility->>+YourCode: Provide ConsumerRecords
+ YourCode->>YourCode: Process records
+ YourCode-->>-KafkaUtility: Return result
+ KafkaUtility-->>-Lambda: Pass result back
+ Lambda-->>-ESM: Return response
+ ESM-->>-Kafka: Acknowledge processed batch
+```
+
+
+### Using ESM with Schema Registry deserialization (JSON)
+
+
+```mermaid
+sequenceDiagram
+ participant Kafka
+ participant ESM as Event Source Mapping
+ participant SchemaRegistry as Schema Registry
+ participant Lambda
+ participant KafkaUtility
+ participant YourCode
+ Kafka->>+ESM: Send batch of records
+ ESM->>+SchemaRegistry: Validate and deserialize
+ SchemaRegistry->>SchemaRegistry: Deserialize records
+ SchemaRegistry-->>-ESM: Return deserialized data
+ ESM->>+Lambda: Invoke with pre-deserialized JSON records
+ Lambda->>+KafkaUtility: Pass Kafka event
+ KafkaUtility->>KafkaUtility: Parse event structure
+ loop For each record
+ KafkaUtility->>KafkaUtility: Decode base64 data
+ KafkaUtility->>KafkaUtility: Record is already deserialized
+ KafkaUtility->>KafkaUtility: Map to POJO (if specified)
+ end
+ KafkaUtility->>+YourCode: Provide ConsumerRecords
+ YourCode->>YourCode: Process records
+ YourCode-->>-KafkaUtility: Return result
+ KafkaUtility-->>-Lambda: Pass result back
+ Lambda-->>-ESM: Return response
+ ESM-->>-Kafka: Acknowledge processed batch
+```
+
+
+### Using ESM without Schema Registry integration
+
+
+```mermaid
+sequenceDiagram
+ participant Kafka
+ participant Lambda
+ participant KafkaUtility
+ participant YourCode
+ Kafka->>+Lambda: Invoke with batch of records (direct integration)
+ Lambda->>+KafkaUtility: Pass raw Kafka event
+ KafkaUtility->>KafkaUtility: Parse event structure
+ loop For each record
+ KafkaUtility->>KafkaUtility: Decode base64 data
+ KafkaUtility->>KafkaUtility: Deserialize based on DeserializationType
+ end
+ KafkaUtility->>+YourCode: Provide ConsumerRecords
+ YourCode->>YourCode: Process records
+ YourCode-->>-KafkaUtility: Return result
+ KafkaUtility-->>-Lambda: Pass result back
+ Lambda-->>-Kafka: Acknowledge processed batch
+```
+
+
+## Testing your code
+
+Testing Kafka consumer functions is straightforward with JUnit. You can construct Kafka `ConsumerRecords` directly with the constructors provided by the kafka-clients library, without needing a real Kafka cluster.
+
+=== "Testing your code"
+
+ ```java
+ package org.example;
+
+ import com.amazonaws.services.lambda.runtime.Context;
+ import com.amazonaws.services.lambda.runtime.events.KafkaEvent;
+ import org.apache.kafka.clients.consumer.ConsumerRecord;
+ import org.apache.kafka.clients.consumer.ConsumerRecords;
+ import org.apache.kafka.common.TopicPartition;
+ import org.junit.jupiter.api.Test;
+ import org.junit.jupiter.api.extension.ExtendWith;
+ import org.mockito.Mock;
+ import org.mockito.junit.jupiter.MockitoExtension;
+ import java.util.*;
+
+ import static org.junit.jupiter.api.Assertions.*;
+ import static org.mockito.Mockito.*;
+
+ @ExtendWith(MockitoExtension.class)
+ class KafkaHandlerTest {
+
+ @Mock
+ private Context context;
+
+ @Test
+ void testProcessJsonMessage() {
+ // Create a test Kafka event with JSON data
+ Order testOrder = new Order("12345", 99.95);
+ ConsumerRecord<String, Order> record = new ConsumerRecord<>(
+ "orders-topic", 0, 15L, null, testOrder);
+
+ Map<TopicPartition, List<ConsumerRecord<String, Order>>> recordsMap = new HashMap<>();
+ recordsMap.put(new TopicPartition("orders-topic", 0), Arrays.asList(record));
+ ConsumerRecords<String, Order> records = new ConsumerRecords<>(recordsMap);
+
+ // Create handler and invoke
+ JsonKafkaHandler handler = new JsonKafkaHandler();
+ String response = handler.handleRequest(records, context);
+
+ // Verify the response
+ assertEquals("OK", response);
+ }
+
+ @Test
+ void testProcessMultipleRecords() {
+ // Create a test event with multiple records
+ Customer customer1 = new Customer("A1", "Alice");
+ Customer customer2 = new Customer("B2", "Bob");
+
+ List<ConsumerRecord<String, Customer>> recordList = Arrays.asList(
+ new ConsumerRecord<>("customers-topic", 0, 10L, null, customer1),
+ new ConsumerRecord<>("customers-topic", 0, 11L, null, customer2)
+ );
+
+ Map<TopicPartition, List<ConsumerRecord<String, Customer>>> recordsMap = new HashMap<>();
+ recordsMap.put(new TopicPartition("customers-topic", 0), recordList);
+ ConsumerRecords<String, Customer> records = new ConsumerRecords<>(recordsMap);
+
+ // Create handler and invoke
+ JsonKafkaHandler handler = new JsonKafkaHandler();
+ String response = handler.handleRequest(records, context);
+
+ // Verify the response
+ assertEquals("OK", response);
+ }
+ }
+ ```
+
+## Extra Resources
+
+### Lambda Custom Serializers Compatibility
+
+This Kafka utility uses [Lambda custom serializers](https://docs.aws.amazon.com/lambda/latest/dg/java-custom-serialization.html) to provide automatic deserialization of Kafka messages.
+
+**Important compatibility considerations:**
+
+- **Existing custom serializers**: This utility will not be compatible if you already use your own custom Lambda serializer in your project
+- **Non-Kafka handlers**: Installing this library will not affect default Lambda serialization behavior for non-Kafka related handlers
+- **Kafka-specific**: The custom serialization only applies to handlers annotated with `@Deserialization`
+- **Lambda default fallback**: Using `@Deserialization(type = DeserializationType.LAMBDA_DEFAULT)` will proxy to Lambda's default serialization behavior
+
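+As a minimal sketch of the fallback behavior (the handler below is illustrative and uses the standard `KafkaEvent` type from `aws-lambda-java-events`), `LAMBDA_DEFAULT` hands the event to Lambda's built-in serialization instead of producing `ConsumerRecords`:
+
+```java
+package org.example;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+import com.amazonaws.services.lambda.runtime.events.KafkaEvent;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+
+public class DefaultSerializationHandler implements RequestHandler<KafkaEvent, String> {
+
+    @Override
+    @Deserialization(type = DeserializationType.LAMBDA_DEFAULT)
+    public String handleRequest(KafkaEvent event, Context context) {
+        // Records arrive exactly as Lambda's default serialization delivers them:
+        // keys and values are still base64-encoded strings inside the KafkaEvent.
+        return "OK";
+    }
+}
+```
+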
+**Need help with compatibility?**
+
+If you are blocked from adopting this utility due to existing custom serializers or other compatibility concerns, please contact us with your specific use-cases. We'd like to understand your requirements and explore potential solutions.
+
+For more information about Lambda custom serialization, see the [official AWS documentation](https://docs.aws.amazon.com/lambda/latest/dg/java-custom-serialization.html).
diff --git a/examples/pom.xml b/examples/pom.xml
index 6bedc015e..ea1e8d542 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -39,6 +39,7 @@
         <module>powertools-examples-parameters/sam</module>
         <module>powertools-examples-parameters/sam-graalvm</module>
         <module>powertools-examples-serialization</module>
+        <module>powertools-examples-kafka</module>
         <module>powertools-examples-batch</module>
         <module>powertools-examples-validation</module>
         <module>powertools-examples-cloudformation</module>
@@ -58,4 +59,4 @@
-</project>
\ No newline at end of file
+</project>
diff --git a/examples/powertools-examples-kafka/README.md b/examples/powertools-examples-kafka/README.md
new file mode 100644
index 000000000..76cd81cb9
--- /dev/null
+++ b/examples/powertools-examples-kafka/README.md
@@ -0,0 +1,77 @@
+# Powertools for AWS Lambda (Java) - Kafka Example
+
+This project demonstrates how to use Powertools for AWS Lambda (Java) to deserialize Kafka Lambda events directly into strongly typed Kafka ConsumerRecords using different serialization formats.
+
+## Overview
+
+The example showcases automatic deserialization of Kafka Lambda events into ConsumerRecords using three formats:
+- JSON - Using standard JSON serialization
+- Avro - Using Apache Avro schema-based serialization
+- Protobuf - Using Google Protocol Buffers serialization
+
+Each format has its own Lambda function handler that demonstrates how to use the `@Deserialization` annotation with the appropriate `DeserializationType`, eliminating the need to handle complex deserialization logic manually.
+
+## Build and Deploy
+
+### Prerequisites
+- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
+- Java 11+
+- Maven
+
+### Build
+
+```bash
+# Build the application
+sam build
+```
+
+### Deploy
+
+```bash
+# Deploy the application to AWS
+sam deploy --guided
+```
+
+During the guided deployment, you'll be prompted to provide values for required parameters. After deployment, SAM will output the ARNs of the deployed Lambda functions.
+
+### Build with Different Serialization Formats
+
+The project includes Maven profiles to build with different serialization formats:
+
+```bash
+# Build with JSON only (no Avro or Protobuf)
+mvn clean package -P base
+
+# Build with Avro only
+mvn clean package -P avro-only
+
+# Build with Protobuf only
+mvn clean package -P protobuf-only
+
+# Build with all formats (default)
+mvn clean package -P full
+```
+
+## Testing
+
+The `events` directory contains sample events for each serialization format:
+- `kafka-json-event.json` - Sample event with JSON-serialized products
+- `kafka-avro-event.json` - Sample event with Avro-serialized products
+- `kafka-protobuf-event.json` - Sample event with Protobuf-serialized products
+
+You can use these events to test the Lambda functions:
+
+```bash
+# Test the JSON deserialization function
+sam local invoke JsonDeserializationFunction --event events/kafka-json-event.json
+
+# Test the Avro deserialization function
+sam local invoke AvroDeserializationFunction --event events/kafka-avro-event.json
+
+# Test the Protobuf deserialization function
+sam local invoke ProtobufDeserializationFunction --event events/kafka-protobuf-event.json
+```
+
+## Sample Generator Tool
+
+The project includes a tool to generate sample JSON, Avro, and Protobuf serialized data. See the [tools/README.md](tools/README.md) for more information.
\ No newline at end of file
diff --git a/examples/powertools-examples-kafka/events/kafka-avro-event.json b/examples/powertools-examples-kafka/events/kafka-avro-event.json
new file mode 100644
index 000000000..8d6ef2210
--- /dev/null
+++ b/examples/powertools-examples-kafka/events/kafka-avro-event.json
@@ -0,0 +1,51 @@
+{
+ "eventSource": "aws:kafka",
+ "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4",
+ "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092",
+ "records": {
+ "mytopic-0": [
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": "NDI=",
+ "value": "0g8MTGFwdG9wUrgehes/j0A=",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ },
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 16,
+ "timestamp": 1545084650988,
+ "timestampType": "CREATE_TIME",
+ "key": "NDI=",
+ "value": "1A8UU21hcnRwaG9uZVK4HoXrv4JA",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ },
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 17,
+ "timestamp": 1545084650989,
+ "timestampType": "CREATE_TIME",
+ "key": null,
+ "value": "1g8USGVhZHBob25lc0jhehSuv2JA",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/examples/powertools-examples-kafka/events/kafka-json-event.json b/examples/powertools-examples-kafka/events/kafka-json-event.json
new file mode 100644
index 000000000..7ffb9a3a6
--- /dev/null
+++ b/examples/powertools-examples-kafka/events/kafka-json-event.json
@@ -0,0 +1,51 @@
+{
+ "eventSource": "aws:kafka",
+ "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4",
+ "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092",
+ "records": {
+ "mytopic-0": [
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": "NDI=",
+ "value": "eyJwcmljZSI6OTk5Ljk5LCJuYW1lIjoiTGFwdG9wIiwiaWQiOjEwMDF9",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ },
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": "NDI=",
+ "value": "eyJwcmljZSI6NTk5Ljk5LCJuYW1lIjoiU21hcnRwaG9uZSIsImlkIjoxMDAyfQ==",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ },
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": null,
+ "value": "eyJwcmljZSI6MTQ5Ljk5LCJuYW1lIjoiSGVhZHBob25lcyIsImlkIjoxMDAzfQ==",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/examples/powertools-examples-kafka/events/kafka-protobuf-event.json b/examples/powertools-examples-kafka/events/kafka-protobuf-event.json
new file mode 100644
index 000000000..b3e0139e3
--- /dev/null
+++ b/examples/powertools-examples-kafka/events/kafka-protobuf-event.json
@@ -0,0 +1,51 @@
+{
+ "eventSource": "aws:kafka",
+ "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4",
+ "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092",
+ "records": {
+ "mytopic-0": [
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 15,
+ "timestamp": 1545084650987,
+ "timestampType": "CREATE_TIME",
+ "key": "NDI=",
+ "value": "COkHEgZMYXB0b3AZUrgehes/j0A=",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ },
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 16,
+ "timestamp": 1545084650988,
+ "timestampType": "CREATE_TIME",
+ "key": "NDI=",
+ "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ },
+ {
+ "topic": "mytopic",
+ "partition": 0,
+ "offset": 17,
+ "timestamp": 1545084650989,
+ "timestampType": "CREATE_TIME",
+ "key": null,
+ "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA",
+ "headers": [
+ {
+ "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml
new file mode 100644
index 000000000..a745ac75d
--- /dev/null
+++ b/examples/powertools-examples-kafka/pom.xml
@@ -0,0 +1,232 @@
+
+ 4.0.0
+ software.amazon.lambda.examples
+ 2.0.0
+ powertools-examples-kafka
+ jar
+ Powertools for AWS Lambda (Java) - Examples - Kafka
+
+
+ 11
+ 11
+ 1.9.20.1
+ 1.12.0
+ 4.31.0
+
+
+
+
+ software.amazon.lambda
+ powertools-kafka
+ ${project.version}
+
+
+ org.apache.kafka
+ kafka-clients
+ 4.0.0
+
+
+ org.apache.avro
+ avro
+ ${avro.version}
+
+
+ com.google.protobuf
+ protobuf-java
+ ${protobuf.version}
+
+
+
+
+ software.amazon.lambda
+ powertools-logging-log4j
+ ${project.version}
+
+
+ org.aspectj
+ aspectjrt
+ ${aspectj.version}
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-deploy-plugin
+ 3.1.4
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ 3.6.0
+
+
+ package
+
+ shade
+
+
+ false
+
+
+
+
+
+
+
+
+ org.apache.logging.log4j
+ log4j-transform-maven-shade-plugin-extensions
+ 0.2.0
+
+
+
+
+ dev.aspectj
+ aspectj-maven-plugin
+ 1.14
+
+ ${maven.compiler.source}
+ ${maven.compiler.target}
+ ${maven.compiler.target}
+
+
+ software.amazon.lambda
+ powertools-logging
+
+
+
+
+
+
+ compile
+
+
+
+
+
+ org.aspectj
+ aspectjtools
+ ${aspectj.version}
+
+
+
+
+
+ org.apache.avro
+ avro-maven-plugin
+ ${avro.version}
+
+
+ generate-sources
+
+ schema
+
+
+ ${project.basedir}/src/main/avro/
+ ${project.basedir}/src/main/java/
+ String
+
+
+
+
+
+
+ io.github.ascopes
+ protobuf-maven-plugin
+ 3.3.0
+
+
+
+ generate
+
+ generate-sources
+
+ ${protobuf.version}
+
+ ${project.basedir}/src/main/proto
+
+ ${project.basedir}/src/main/java
+ false
+
+
+
+
+
+
+
+
+
+
+ base
+
+ base
+
+
+
+
+ org.apache.avro
+ avro
+ ${avro.version}
+ provided
+
+
+ com.google.protobuf
+ protobuf-java
+ ${protobuf.version}
+ provided
+
+
+
+
+
+
+ avro-only
+
+ avro-only
+
+
+
+ com.google.protobuf
+ protobuf-java
+ ${protobuf.version}
+ provided
+
+
+
+
+
+
+ protobuf-only
+
+ protobuf-only
+
+
+
+ org.apache.avro
+ avro
+ ${avro.version}
+ provided
+
+
+
+
+
+
+ full
+
+ true
+
+
+ full
+
+
+
+
diff --git a/examples/powertools-examples-kafka/src/main/avro/AvroProduct.avsc b/examples/powertools-examples-kafka/src/main/avro/AvroProduct.avsc
new file mode 100644
index 000000000..7155857ea
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/avro/AvroProduct.avsc
@@ -0,0 +1,10 @@
+{
+ "namespace": "org.demo.kafka.avro",
+ "type": "record",
+ "name": "AvroProduct",
+ "fields": [
+ {"name": "id", "type": "int"},
+ {"name": "name", "type": "string"},
+ {"name": "price", "type": "double"}
+ ]
+}
diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/AvroDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/AvroDeserializationFunction.java
new file mode 100644
index 000000000..72f383eef
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/AvroDeserializationFunction.java
@@ -0,0 +1,37 @@
+package org.demo.kafka;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.demo.kafka.avro.AvroProduct;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+import software.amazon.lambda.powertools.logging.Logging;
+
+public class AvroDeserializationFunction implements RequestHandler<ConsumerRecords<String, AvroProduct>, String> {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(AvroDeserializationFunction.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_AVRO)
+ public String handleRequest(ConsumerRecords<String, AvroProduct> records, Context context) {
+ for (ConsumerRecord<String, AvroProduct> consumerRecord : records) {
+ LOGGER.info("ConsumerRecord: {}", consumerRecord);
+
+ AvroProduct product = consumerRecord.value();
+ LOGGER.info("AvroProduct: {}", product);
+
+ String key = consumerRecord.key();
+ LOGGER.info("Key: {}", key);
+ }
+
+ return "OK";
+ }
+
+}
diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/JsonDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/JsonDeserializationFunction.java
new file mode 100644
index 000000000..c1d7f13ae
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/JsonDeserializationFunction.java
@@ -0,0 +1,35 @@
+package org.demo.kafka;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+import software.amazon.lambda.powertools.logging.Logging;
+
+public class JsonDeserializationFunction implements RequestHandler<ConsumerRecords<String, Product>, String> {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(JsonDeserializationFunction.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<String, Product> consumerRecords, Context context) {
+ for (ConsumerRecord<String, Product> consumerRecord : consumerRecords) {
+ LOGGER.info("ConsumerRecord: {}", consumerRecord);
+
+ Product product = consumerRecord.value();
+ LOGGER.info("Product: {}", product);
+
+ String key = consumerRecord.key();
+ LOGGER.info("Key: {}", key);
+ }
+
+ return "OK";
+ }
+}
diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/Product.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/Product.java
new file mode 100644
index 000000000..c6166090c
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/Product.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.demo.kafka;
+
+public class Product {
+ private long id;
+ private String name;
+ private double price;
+
+ public Product() {
+ }
+
+ public Product(long id, String name, double price) {
+ this.id = id;
+ this.name = name;
+ this.price = price;
+ }
+
+ public long getId() {
+ return id;
+ }
+
+ public void setId(long id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public double getPrice() {
+ return price;
+ }
+
+ public void setPrice(double price) {
+ this.price = price;
+ }
+
+ @Override
+ public String toString() {
+ return "Product{" +
+ "id=" + id +
+ ", name='" + name + '\'' +
+ ", price=" + price +
+ '}';
+ }
+}
diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/ProtobufDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/ProtobufDeserializationFunction.java
new file mode 100644
index 000000000..1978e8890
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/ProtobufDeserializationFunction.java
@@ -0,0 +1,38 @@
+package org.demo.kafka;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.demo.kafka.protobuf.ProtobufProduct;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+import software.amazon.lambda.powertools.logging.Logging;
+
+public class ProtobufDeserializationFunction
+ implements RequestHandler<ConsumerRecords<String, ProtobufProduct>, String> {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ProtobufDeserializationFunction.class);
+
+ @Override
+ @Logging
+ @Deserialization(type = DeserializationType.KAFKA_PROTOBUF)
+ public String handleRequest(ConsumerRecords<String, ProtobufProduct> records, Context context) {
+ for (ConsumerRecord<String, ProtobufProduct> consumerRecord : records) {
+ LOGGER.info("ConsumerRecord: {}", consumerRecord);
+
+ ProtobufProduct product = consumerRecord.value();
+ LOGGER.info("ProtobufProduct: {}", product);
+
+ String key = consumerRecord.key();
+ LOGGER.info("Key: {}", key);
+ }
+
+ return "OK";
+ }
+
+}
diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/avro/AvroProduct.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/avro/AvroProduct.java
new file mode 100644
index 000000000..fad7e2fbf
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/avro/AvroProduct.java
@@ -0,0 +1,476 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package org.demo.kafka.avro;
+
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.message.BinaryMessageEncoder;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.message.SchemaStore;
+
+@org.apache.avro.specific.AvroGenerated
+public class AvroProduct extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+ private static final long serialVersionUID = -2929699301240218341L;
+
+
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"org.demo.kafka.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"price\",\"type\":\"double\"}]}");
+ public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+
+ private static final SpecificData MODEL$ = new SpecificData();
+
+ private static final BinaryMessageEncoder<AvroProduct> ENCODER =
+ new BinaryMessageEncoder<>(MODEL$, SCHEMA$);
+
+ private static final BinaryMessageDecoder<AvroProduct> DECODER =
+ new BinaryMessageDecoder<>(MODEL$, SCHEMA$);
+
+ /**
+ * Return the BinaryMessageEncoder instance used by this class.
+ * @return the message encoder used by this class
+ */
+ public static BinaryMessageEncoder<AvroProduct> getEncoder() {
+ return ENCODER;
+ }
+
+ /**
+ * Return the BinaryMessageDecoder instance used by this class.
+ * @return the message decoder used by this class
+ */
+ public static BinaryMessageDecoder<AvroProduct> getDecoder() {
+ return DECODER;
+ }
+
+ /**
+ * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
+ * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
+ * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
+ */
+ public static BinaryMessageDecoder<AvroProduct> createDecoder(SchemaStore resolver) {
+ return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver);
+ }
+
+ /**
+ * Serializes this AvroProduct to a ByteBuffer.
+ * @return a buffer holding the serialized data for this instance
+ * @throws java.io.IOException if this instance could not be serialized
+ */
+ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
+ return ENCODER.encode(this);
+ }
+
+ /**
+ * Deserializes a AvroProduct from a ByteBuffer.
+ * @param b a byte buffer holding serialized data for an instance of this class
+ * @return a AvroProduct instance decoded from the given buffer
+ * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
+ */
+ public static AvroProduct fromByteBuffer(
+ java.nio.ByteBuffer b) throws java.io.IOException {
+ return DECODER.decode(b);
+ }
+
+ private int id;
+ private java.lang.String name;
+ private double price;
+
+ /**
+ * Default constructor. Note that this does not initialize fields
+ * to their default values from the schema. If that is desired then
+ * one should use newBuilder().
+ */
+ public AvroProduct() {}
+
+ /**
+ * All-args constructor.
+ * @param id The new value for id
+ * @param name The new value for name
+ * @param price The new value for price
+ */
+ public AvroProduct(java.lang.Integer id, java.lang.String name, java.lang.Double price) {
+ this.id = id;
+ this.name = name;
+ this.price = price;
+ }
+
+ @Override
+ public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
+
+ @Override
+ public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+
+ // Used by DatumWriter. Applications should not call.
+ @Override
+ public java.lang.Object get(int field$) {
+ switch (field$) {
+ case 0: return id;
+ case 1: return name;
+ case 2: return price;
+ default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
+ }
+ }
+
+ // Used by DatumReader. Applications should not call.
+ @Override
+ @SuppressWarnings(value="unchecked")
+ public void put(int field$, java.lang.Object value$) {
+ switch (field$) {
+ case 0: id = (java.lang.Integer)value$; break;
+ case 1: name = value$ != null ? value$.toString() : null; break;
+ case 2: price = (java.lang.Double)value$; break;
+ default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
+ }
+ }
+
+ /**
+ * Gets the value of the 'id' field.
+ * @return The value of the 'id' field.
+ */
+ public int getId() {
+ return id;
+ }
+
+
+ /**
+ * Sets the value of the 'id' field.
+ * @param value the value to set.
+ */
+ public void setId(int value) {
+ this.id = value;
+ }
+
+ /**
+ * Gets the value of the 'name' field.
+ * @return The value of the 'name' field.
+ */
+ public java.lang.String getName() {
+ return name;
+ }
+
+
+ /**
+ * Sets the value of the 'name' field.
+ * @param value the value to set.
+ */
+ public void setName(java.lang.String value) {
+ this.name = value;
+ }
+
+ /**
+ * Gets the value of the 'price' field.
+ * @return The value of the 'price' field.
+ */
+ public double getPrice() {
+ return price;
+ }
+
+
+ /**
+ * Sets the value of the 'price' field.
+ * @param value the value to set.
+ */
+ public void setPrice(double value) {
+ this.price = value;
+ }
+
+ /**
+ * Creates a new AvroProduct RecordBuilder.
+ * @return A new AvroProduct RecordBuilder
+ */
+ public static org.demo.kafka.avro.AvroProduct.Builder newBuilder() {
+ return new org.demo.kafka.avro.AvroProduct.Builder();
+ }
+
+ /**
+ * Creates a new AvroProduct RecordBuilder by copying an existing Builder.
+ * @param other The existing builder to copy.
+ * @return A new AvroProduct RecordBuilder
+ */
+ public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct.Builder other) {
+ if (other == null) {
+ return new org.demo.kafka.avro.AvroProduct.Builder();
+ } else {
+ return new org.demo.kafka.avro.AvroProduct.Builder(other);
+ }
+ }
+
+ /**
+ * Creates a new AvroProduct RecordBuilder by copying an existing AvroProduct instance.
+ * @param other The existing instance to copy.
+ * @return A new AvroProduct RecordBuilder
+ */
+ public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct other) {
+ if (other == null) {
+ return new org.demo.kafka.avro.AvroProduct.Builder();
+ } else {
+ return new org.demo.kafka.avro.AvroProduct.Builder(other);
+ }
+ }
+
+ /**
+ * RecordBuilder for AvroProduct instances.
+ */
+ @org.apache.avro.specific.AvroGenerated
+  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<AvroProduct>
+    implements org.apache.avro.data.RecordBuilder<AvroProduct> {
+
+ private int id;
+ private java.lang.String name;
+ private double price;
+
+ /** Creates a new Builder */
+ private Builder() {
+ super(SCHEMA$, MODEL$);
+ }
+
+ /**
+ * Creates a Builder by copying an existing Builder.
+ * @param other The existing Builder to copy.
+ */
+ private Builder(org.demo.kafka.avro.AvroProduct.Builder other) {
+ super(other);
+ if (isValidValue(fields()[0], other.id)) {
+ this.id = data().deepCopy(fields()[0].schema(), other.id);
+ fieldSetFlags()[0] = other.fieldSetFlags()[0];
+ }
+ if (isValidValue(fields()[1], other.name)) {
+ this.name = data().deepCopy(fields()[1].schema(), other.name);
+ fieldSetFlags()[1] = other.fieldSetFlags()[1];
+ }
+ if (isValidValue(fields()[2], other.price)) {
+ this.price = data().deepCopy(fields()[2].schema(), other.price);
+ fieldSetFlags()[2] = other.fieldSetFlags()[2];
+ }
+ }
+
+ /**
+ * Creates a Builder by copying an existing AvroProduct instance
+ * @param other The existing instance to copy.
+ */
+ private Builder(org.demo.kafka.avro.AvroProduct other) {
+ super(SCHEMA$, MODEL$);
+ if (isValidValue(fields()[0], other.id)) {
+ this.id = data().deepCopy(fields()[0].schema(), other.id);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.name)) {
+ this.name = data().deepCopy(fields()[1].schema(), other.name);
+ fieldSetFlags()[1] = true;
+ }
+ if (isValidValue(fields()[2], other.price)) {
+ this.price = data().deepCopy(fields()[2].schema(), other.price);
+ fieldSetFlags()[2] = true;
+ }
+ }
+
+ /**
+ * Gets the value of the 'id' field.
+ * @return The value.
+ */
+ public int getId() {
+ return id;
+ }
+
+
+ /**
+ * Sets the value of the 'id' field.
+ * @param value The value of 'id'.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder setId(int value) {
+ validate(fields()[0], value);
+ this.id = value;
+ fieldSetFlags()[0] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'id' field has been set.
+ * @return True if the 'id' field has been set, false otherwise.
+ */
+ public boolean hasId() {
+ return fieldSetFlags()[0];
+ }
+
+
+ /**
+ * Clears the value of the 'id' field.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder clearId() {
+ fieldSetFlags()[0] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'name' field.
+ * @return The value.
+ */
+ public java.lang.String getName() {
+ return name;
+ }
+
+
+ /**
+ * Sets the value of the 'name' field.
+ * @param value The value of 'name'.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder setName(java.lang.String value) {
+ validate(fields()[1], value);
+ this.name = value;
+ fieldSetFlags()[1] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'name' field has been set.
+ * @return True if the 'name' field has been set, false otherwise.
+ */
+ public boolean hasName() {
+ return fieldSetFlags()[1];
+ }
+
+
+ /**
+ * Clears the value of the 'name' field.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder clearName() {
+ name = null;
+ fieldSetFlags()[1] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'price' field.
+ * @return The value.
+ */
+ public double getPrice() {
+ return price;
+ }
+
+
+ /**
+ * Sets the value of the 'price' field.
+ * @param value The value of 'price'.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder setPrice(double value) {
+ validate(fields()[2], value);
+ this.price = value;
+ fieldSetFlags()[2] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'price' field has been set.
+ * @return True if the 'price' field has been set, false otherwise.
+ */
+ public boolean hasPrice() {
+ return fieldSetFlags()[2];
+ }
+
+
+ /**
+ * Clears the value of the 'price' field.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder clearPrice() {
+ fieldSetFlags()[2] = false;
+ return this;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public AvroProduct build() {
+ try {
+ AvroProduct record = new AvroProduct();
+ record.id = fieldSetFlags()[0] ? this.id : (java.lang.Integer) defaultValue(fields()[0]);
+ record.name = fieldSetFlags()[1] ? this.name : (java.lang.String) defaultValue(fields()[1]);
+ record.price = fieldSetFlags()[2] ? this.price : (java.lang.Double) defaultValue(fields()[2]);
+ return record;
+ } catch (org.apache.avro.AvroMissingFieldException e) {
+ throw e;
+ } catch (java.lang.Exception e) {
+ throw new org.apache.avro.AvroRuntimeException(e);
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+  private static final org.apache.avro.io.DatumWriter<AvroProduct>
+    WRITER$ = (org.apache.avro.io.DatumWriter<AvroProduct>)MODEL$.createDatumWriter(SCHEMA$);
+
+ @Override public void writeExternal(java.io.ObjectOutput out)
+ throws java.io.IOException {
+ WRITER$.write(this, SpecificData.getEncoder(out));
+ }
+
+ @SuppressWarnings("unchecked")
+  private static final org.apache.avro.io.DatumReader<AvroProduct>
+    READER$ = (org.apache.avro.io.DatumReader<AvroProduct>)MODEL$.createDatumReader(SCHEMA$);
+
+ @Override public void readExternal(java.io.ObjectInput in)
+ throws java.io.IOException {
+ READER$.read(this, SpecificData.getDecoder(in));
+ }
+
+ @Override protected boolean hasCustomCoders() { return true; }
+
+ @Override public void customEncode(org.apache.avro.io.Encoder out)
+ throws java.io.IOException
+ {
+ out.writeInt(this.id);
+
+ out.writeString(this.name);
+
+ out.writeDouble(this.price);
+
+ }
+
+ @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
+ throws java.io.IOException
+ {
+ org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
+ if (fieldOrder == null) {
+ this.id = in.readInt();
+
+ this.name = in.readString();
+
+ this.price = in.readDouble();
+
+ } else {
+ for (int i = 0; i < 3; i++) {
+ switch (fieldOrder[i].pos()) {
+ case 0:
+ this.id = in.readInt();
+ break;
+
+ case 1:
+ this.name = in.readString();
+ break;
+
+ case 2:
+ this.price = in.readDouble();
+ break;
+
+ default:
+ throw new java.io.IOException("Corrupt ResolvingDecoder.");
+ }
+ }
+ }
+ }
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java
new file mode 100644
index 000000000..6da9113fc
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java
@@ -0,0 +1,636 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// NO CHECKED-IN PROTOBUF GENCODE
+// source: ProtobufProduct.proto
+// Protobuf Java Version: 4.31.0
+
+package org.demo.kafka.protobuf;
+
+/**
+ * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct}
+ */
+@com.google.protobuf.Generated
+public final class ProtobufProduct extends
+ com.google.protobuf.GeneratedMessage implements
+ // @@protoc_insertion_point(message_implements:org.demo.kafka.protobuf.ProtobufProduct)
+ ProtobufProductOrBuilder {
+private static final long serialVersionUID = 0L;
+ static {
+ com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
+ com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC,
+ /* major= */ 4,
+ /* minor= */ 31,
+ /* patch= */ 0,
+ /* suffix= */ "",
+ ProtobufProduct.class.getName());
+ }
+ // Use ProtobufProduct.newBuilder() to construct.
+  private ProtobufProduct(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ }
+ private ProtobufProduct() {
+ name_ = "";
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class);
+ }
+
+ public static final int ID_FIELD_NUMBER = 1;
+ private int id_ = 0;
+ /**
+ * int32 id = 1;
+ * @return The id.
+ */
+ @java.lang.Override
+ public int getId() {
+ return id_;
+ }
+
+ public static final int NAME_FIELD_NUMBER = 2;
+ @SuppressWarnings("serial")
+ private volatile java.lang.Object name_ = "";
+ /**
+ * string name = 2;
+ * @return The name.
+ */
+ @java.lang.Override
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ name_ = s;
+ return s;
+ }
+ }
+ /**
+ * string name = 2;
+ * @return The bytes for name.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int PRICE_FIELD_NUMBER = 3;
+ private double price_ = 0D;
+ /**
+ * double price = 3;
+ * @return The price.
+ */
+ @java.lang.Override
+ public double getPrice() {
+ return price_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (id_ != 0) {
+ output.writeInt32(1, id_);
+ }
+ if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) {
+ com.google.protobuf.GeneratedMessage.writeString(output, 2, name_);
+ }
+ if (java.lang.Double.doubleToRawLongBits(price_) != 0) {
+ output.writeDouble(3, price_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (id_ != 0) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(1, id_);
+ }
+ if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) {
+ size += com.google.protobuf.GeneratedMessage.computeStringSize(2, name_);
+ }
+ if (java.lang.Double.doubleToRawLongBits(price_) != 0) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeDoubleSize(3, price_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.demo.kafka.protobuf.ProtobufProduct)) {
+ return super.equals(obj);
+ }
+ org.demo.kafka.protobuf.ProtobufProduct other = (org.demo.kafka.protobuf.ProtobufProduct) obj;
+
+ if (getId()
+ != other.getId()) return false;
+ if (!getName()
+ .equals(other.getName())) return false;
+ if (java.lang.Double.doubleToLongBits(getPrice())
+ != java.lang.Double.doubleToLongBits(
+ other.getPrice())) return false;
+ if (!getUnknownFields().equals(other.getUnknownFields())) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (37 * hash) + ID_FIELD_NUMBER;
+ hash = (53 * hash) + getId();
+ hash = (37 * hash) + NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getName().hashCode();
+ hash = (37 * hash) + PRICE_FIELD_NUMBER;
+ hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+ java.lang.Double.doubleToLongBits(getPrice()));
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+
+ public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ @java.lang.Override
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.demo.kafka.protobuf.ProtobufProduct prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct}
+ */
+ public static final class Builder extends
+      com.google.protobuf.GeneratedMessage.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:org.demo.kafka.protobuf.ProtobufProduct)
+ org.demo.kafka.protobuf.ProtobufProductOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class);
+ }
+
+ // Construct using org.demo.kafka.protobuf.ProtobufProduct.newBuilder()
+ private Builder() {
+
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+
+ }
+ @java.lang.Override
+ public Builder clear() {
+ super.clear();
+ bitField0_ = 0;
+ id_ = 0;
+ name_ = "";
+ price_ = 0D;
+ return this;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor;
+ }
+
+ @java.lang.Override
+ public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() {
+ return org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance();
+ }
+
+ @java.lang.Override
+ public org.demo.kafka.protobuf.ProtobufProduct build() {
+ org.demo.kafka.protobuf.ProtobufProduct result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ @java.lang.Override
+ public org.demo.kafka.protobuf.ProtobufProduct buildPartial() {
+ org.demo.kafka.protobuf.ProtobufProduct result = new org.demo.kafka.protobuf.ProtobufProduct(this);
+ if (bitField0_ != 0) { buildPartial0(result); }
+ onBuilt();
+ return result;
+ }
+
+ private void buildPartial0(org.demo.kafka.protobuf.ProtobufProduct result) {
+ int from_bitField0_ = bitField0_;
+ if (((from_bitField0_ & 0x00000001) != 0)) {
+ result.id_ = id_;
+ }
+ if (((from_bitField0_ & 0x00000002) != 0)) {
+ result.name_ = name_;
+ }
+ if (((from_bitField0_ & 0x00000004) != 0)) {
+ result.price_ = price_;
+ }
+ }
+
+ @java.lang.Override
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.demo.kafka.protobuf.ProtobufProduct) {
+ return mergeFrom((org.demo.kafka.protobuf.ProtobufProduct)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.demo.kafka.protobuf.ProtobufProduct other) {
+ if (other == org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance()) return this;
+ if (other.getId() != 0) {
+ setId(other.getId());
+ }
+ if (!other.getName().isEmpty()) {
+ name_ = other.name_;
+ bitField0_ |= 0x00000002;
+ onChanged();
+ }
+ if (java.lang.Double.doubleToRawLongBits(other.getPrice()) != 0) {
+ setPrice(other.getPrice());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ onChanged();
+ return this;
+ }
+
+ @java.lang.Override
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ @java.lang.Override
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ case 8: {
+ id_ = input.readInt32();
+ bitField0_ |= 0x00000001;
+ break;
+ } // case 8
+ case 18: {
+ name_ = input.readStringRequireUtf8();
+ bitField0_ |= 0x00000002;
+ break;
+ } // case 18
+ case 25: {
+ price_ = input.readDouble();
+ bitField0_ |= 0x00000004;
+ break;
+ } // case 25
+ default: {
+ if (!super.parseUnknownField(input, extensionRegistry, tag)) {
+ done = true; // was an endgroup tag
+ }
+ break;
+ } // default:
+ } // switch (tag)
+ } // while (!done)
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.unwrapIOException();
+ } finally {
+ onChanged();
+ } // finally
+ return this;
+ }
+ private int bitField0_;
+
+ private int id_ ;
+ /**
+ * int32 id = 1;
+ * @return The id.
+ */
+ @java.lang.Override
+ public int getId() {
+ return id_;
+ }
+ /**
+ * int32 id = 1;
+ * @param value The id to set.
+ * @return This builder for chaining.
+ */
+ public Builder setId(int value) {
+
+ id_ = value;
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return this;
+ }
+ /**
+ * int32 id = 1;
+ * @return This builder for chaining.
+ */
+ public Builder clearId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ id_ = 0;
+ onChanged();
+ return this;
+ }
+
+ private java.lang.Object name_ = "";
+ /**
+ * string name = 2;
+ * @return The name.
+ */
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ name_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * string name = 2;
+ * @return The bytes for name.
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * string name = 2;
+ * @param value The name to set.
+ * @return This builder for chaining.
+ */
+ public Builder setName(
+ java.lang.String value) {
+ if (value == null) { throw new NullPointerException(); }
+ name_ = value;
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return this;
+ }
+ /**
+ * string name = 2;
+ * @return This builder for chaining.
+ */
+ public Builder clearName() {
+ name_ = getDefaultInstance().getName();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ return this;
+ }
+ /**
+ * string name = 2;
+ * @param value The bytes for name to set.
+ * @return This builder for chaining.
+ */
+ public Builder setNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) { throw new NullPointerException(); }
+ checkByteStringIsUtf8(value);
+ name_ = value;
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return this;
+ }
+
+ private double price_ ;
+ /**
+ * double price = 3;
+ * @return The price.
+ */
+ @java.lang.Override
+ public double getPrice() {
+ return price_;
+ }
+ /**
+ * double price = 3;
+ * @param value The price to set.
+ * @return This builder for chaining.
+ */
+ public Builder setPrice(double value) {
+
+ price_ = value;
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return this;
+ }
+ /**
+ * double price = 3;
+ * @return This builder for chaining.
+ */
+ public Builder clearPrice() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ price_ = 0D;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:org.demo.kafka.protobuf.ProtobufProduct)
+ }
+
+ // @@protoc_insertion_point(class_scope:org.demo.kafka.protobuf.ProtobufProduct)
+ private static final org.demo.kafka.protobuf.ProtobufProduct DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.demo.kafka.protobuf.ProtobufProduct();
+ }
+
+ public static org.demo.kafka.protobuf.ProtobufProduct getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+  private static final com.google.protobuf.Parser<ProtobufProduct>
+      PARSER = new com.google.protobuf.AbstractParser<ProtobufProduct>() {
+ @java.lang.Override
+ public ProtobufProduct parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ Builder builder = newBuilder();
+ try {
+ builder.mergeFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(builder.buildPartial());
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(e)
+ .setUnfinishedMessage(builder.buildPartial());
+ }
+ return builder.buildPartial();
+ }
+ };
+
+  public static com.google.protobuf.Parser<ProtobufProduct> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+  public com.google.protobuf.Parser<ProtobufProduct> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+}
+
diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java
new file mode 100644
index 000000000..9c1518db3
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java
@@ -0,0 +1,36 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// NO CHECKED-IN PROTOBUF GENCODE
+// source: ProtobufProduct.proto
+// Protobuf Java Version: 4.31.0
+
+package org.demo.kafka.protobuf;
+
+@com.google.protobuf.Generated
+public interface ProtobufProductOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:org.demo.kafka.protobuf.ProtobufProduct)
+ com.google.protobuf.MessageOrBuilder {
+
+ /**
+ * int32 id = 1;
+ * @return The id.
+ */
+ int getId();
+
+ /**
+ * string name = 2;
+ * @return The name.
+ */
+ java.lang.String getName();
+ /**
+ * string name = 2;
+ * @return The bytes for name.
+ */
+ com.google.protobuf.ByteString
+ getNameBytes();
+
+ /**
+ * double price = 3;
+ * @return The price.
+ */
+ double getPrice();
+}
diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java
new file mode 100644
index 000000000..6a99f35ec
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java
@@ -0,0 +1,63 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// NO CHECKED-IN PROTOBUF GENCODE
+// source: ProtobufProduct.proto
+// Protobuf Java Version: 4.31.0
+
+package org.demo.kafka.protobuf;
+
+@com.google.protobuf.Generated
+public final class ProtobufProductOuterClass {
+ private ProtobufProductOuterClass() {}
+ static {
+ com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
+ com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC,
+ /* major= */ 4,
+ /* minor= */ 31,
+ /* patch= */ 0,
+ /* suffix= */ "",
+ ProtobufProductOuterClass.class.getName());
+ }
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistryLite registry) {
+ }
+
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ registerAllExtensions(
+ (com.google.protobuf.ExtensionRegistryLite) registry);
+ }
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor;
+ static final
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\025ProtobufProduct.proto\022\027org.demo.kafka." +
+ "protobuf\":\n\017ProtobufProduct\022\n\n\002id\030\001 \001(\005\022" +
+ "\014\n\004name\030\002 \001(\t\022\r\n\005price\030\003 \001(\001B6\n\027org.demo" +
+ ".kafka.protobufB\031ProtobufProductOuterCla" +
+ "ssP\001b\006proto3"
+ };
+ descriptor = com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ });
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor,
+ new java.lang.String[] { "Id", "Name", "Price", });
+ descriptor.resolveAllFeaturesImmutable();
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/examples/powertools-examples-kafka/src/main/proto/ProtobufProduct.proto b/examples/powertools-examples-kafka/src/main/proto/ProtobufProduct.proto
new file mode 100644
index 000000000..4d3338a6f
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/proto/ProtobufProduct.proto
@@ -0,0 +1,13 @@
+syntax = "proto3";
+
+package org.demo.kafka.protobuf;
+
+option java_package = "org.demo.kafka.protobuf";
+option java_outer_classname = "ProtobufProductOuterClass";
+option java_multiple_files = true;
+
+message ProtobufProduct {
+ int32 id = 1;
+ string name = 2;
+ double price = 3;
+}
\ No newline at end of file
diff --git a/examples/powertools-examples-kafka/src/main/resources/log4j2.xml b/examples/powertools-examples-kafka/src/main/resources/log4j2.xml
new file mode 100644
index 000000000..fe943d707
--- /dev/null
+++ b/examples/powertools-examples-kafka/src/main/resources/log4j2.xml
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/powertools-examples-kafka/template.yaml b/examples/powertools-examples-kafka/template.yaml
new file mode 100644
index 000000000..509b13ca3
--- /dev/null
+++ b/examples/powertools-examples-kafka/template.yaml
@@ -0,0 +1,59 @@
+AWSTemplateFormatVersion: "2010-09-09"
+Transform: AWS::Serverless-2016-10-31
+Description: >
+ Kafka Deserialization example with Kafka Lambda ESM
+
+Globals:
+ Function:
+ Timeout: 20
+ Runtime: java11
+ MemorySize: 512
+ Tracing: Active
+
+Resources:
+ JsonDeserializationFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: .
+ Handler: org.demo.kafka.JsonDeserializationFunction::handleRequest
+ Environment:
+ Variables:
+ JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1"
+ POWERTOOLS_LOG_LEVEL: DEBUG
+ POWERTOOLS_SERVICE_NAME: JsonDeserialization
+ POWERTOOLS_METRICS_NAMESPACE: JsonDeserializationFunction
+
+ AvroDeserializationFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: .
+ Handler: org.demo.kafka.AvroDeserializationFunction::handleRequest
+ Environment:
+ Variables:
+ JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1"
+ POWERTOOLS_LOG_LEVEL: DEBUG
+ POWERTOOLS_SERVICE_NAME: AvroDeserialization
+ POWERTOOLS_METRICS_NAMESPACE: AvroDeserializationFunction
+
+ ProtobufDeserializationFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: .
+ Handler: org.demo.kafka.ProtobufDeserializationFunction::handleRequest
+ Environment:
+ Variables:
+ JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1"
+ POWERTOOLS_LOG_LEVEL: DEBUG
+ POWERTOOLS_SERVICE_NAME: ProtobufDeserialization
+ POWERTOOLS_METRICS_NAMESPACE: ProtobufDeserializationFunction
+
+Outputs:
+ JsonFunction:
+ Description: "Kafka JSON Lambda Function ARN"
+ Value: !GetAtt JsonDeserializationFunction.Arn
+ AvroFunction:
+ Description: "Kafka Avro Lambda Function ARN"
+ Value: !GetAtt AvroDeserializationFunction.Arn
+ ProtobufFunction:
+ Description: "Kafka Protobuf Lambda Function ARN"
+ Value: !GetAtt ProtobufDeserializationFunction.Arn
diff --git a/examples/powertools-examples-kafka/tools/README.md b/examples/powertools-examples-kafka/tools/README.md
new file mode 100644
index 000000000..53d07b0c4
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/README.md
@@ -0,0 +1,66 @@
+# Kafka Sample Generator Tool
+
+This tool generates base64-encoded serialized products for testing the Kafka consumer functions with different serialization formats.
+
+## Supported Formats
+
+- **JSON**: Generates base64-encoded JSON serialized products
+- **Avro**: Generates base64-encoded Avro serialized products
+- **Protobuf**: Generates base64-encoded Protobuf serialized products
+
+## Usage
+
+Run the following Maven commands from this directory:
+
+```bash
+# Generate Avro and Protobuf classes from schemas
+mvn generate-sources
+
+# Compile the code
+mvn compile
+```
+
+### Generate JSON Samples
+
+```bash
+# Run the JSON sample generator
+mvn exec:java -Dexec.mainClass="org.demo.kafka.tools.GenerateJsonSamples"
+```
+
+The tool will output base64-encoded values for JSON products that can be used in `../events/kafka-json-event.json`.
+
+### Generate Avro Samples
+
+```bash
+# Run the Avro sample generator
+mvn exec:java -Dexec.mainClass="org.demo.kafka.tools.GenerateAvroSamples"
+```
+
+The tool will output base64-encoded values for Avro products that can be used in `../events/kafka-avro-event.json`.
+
+### Generate Protobuf Samples
+
+```bash
+# Run the Protobuf sample generator
+mvn exec:java -Dexec.mainClass="org.demo.kafka.tools.GenerateProtobufSamples"
+```
+
+The tool will output base64-encoded values for Protobuf products that can be used in `../events/kafka-protobuf-event.json`.
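+
+The three `-Dexec.mainClass` invocations above correspond to named `exec-maven-plugin` executions in this module's `pom.xml`. As a sketch — assuming a Maven version that supports the `goal@execution-id` syntax (3.3.1 or later) — each generator can likely also be run by its execution id:
+
+```bash
+# Equivalent invocations using the execution ids defined in pom.xml
+mvn exec:java@generate-json-samples
+mvn exec:java@generate-avro-samples
+mvn exec:java@generate-protobuf-samples
+```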
+
+## Output
+
+Each generator produces:
+
+1. Three different products (Laptop, Smartphone, Headphones)
+2. An integer key (42) and one entry with a null key to exercise edge cases
+3. A complete sample event structure that can be used directly for testing
+
+## Example
+
+After generating the samples, you can copy the output into the respective event files:
+
+- `../events/kafka-json-event.json` for JSON samples
+- `../events/kafka-avro-event.json` for Avro samples
+- `../events/kafka-protobuf-event.json` for Protobuf samples
+
+These event files can then be used to test the Lambda functions with the appropriate deserializer.
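+
+As a rough sketch — assuming the AWS SAM CLI is installed, using the function logical IDs from the example's `template.yaml`, and with the event file path below as an assumption (point it at whichever file you populated) — a function can then be invoked locally with one of the generated events:
+
+```bash
+# Run from the example root (one level above this tools directory)
+sam build
+sam local invoke JsonDeserializationFunction --event events/kafka-json-event.json
+```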
diff --git a/examples/powertools-examples-kafka/tools/pom.xml b/examples/powertools-examples-kafka/tools/pom.xml
new file mode 100644
index 000000000..97231e5bd
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/pom.xml
@@ -0,0 +1,104 @@
+
+
+ 4.0.0
+
+ software.amazon.lambda.examples
+ powertools-examples-kafka-tools
+ 2.0.0
+
+
+ 11
+ 11
+ 1.12.0
+ 4.31.0
+
+
+
+
+ org.apache.avro
+ avro
+ ${avro.version}
+
+
+ com.google.protobuf
+ protobuf-java
+ ${protobuf.version}
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+ 2.19.0
+
+
+
+
+
+
+ org.apache.avro
+ avro-maven-plugin
+ ${avro.version}
+
+
+ generate-sources
+
+ schema
+
+
+ ${project.basedir}/../src/main/avro/
+ ${project.basedir}/src/main/java/
+ String
+
+
+
+
+
+ io.github.ascopes
+ protobuf-maven-plugin
+ 3.3.0
+
+
+
+ generate
+
+ generate-sources
+
+ ${protobuf.version}
+
+ ${project.basedir}/../src/main/proto
+
+ ${project.basedir}/src/main/java
+ false
+
+
+
+
+
+ org.codehaus.mojo
+ exec-maven-plugin
+ 3.1.0
+
+
+ generate-json-samples
+
+ org.demo.kafka.tools.GenerateJsonSamples
+
+
+
+ generate-avro-samples
+
+ org.demo.kafka.tools.GenerateAvroSamples
+
+
+
+ generate-protobuf-samples
+
+ org.demo.kafka.tools.GenerateProtobufSamples
+
+
+
+
+
+
+
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java
new file mode 100644
index 000000000..fad7e2fbf
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java
@@ -0,0 +1,476 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package org.demo.kafka.avro;
+
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.message.BinaryMessageEncoder;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.message.SchemaStore;
+
+@org.apache.avro.specific.AvroGenerated
+public class AvroProduct extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+ private static final long serialVersionUID = -2929699301240218341L;
+
+
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"org.demo.kafka.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"price\",\"type\":\"double\"}]}");
+ public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+
+ private static final SpecificData MODEL$ = new SpecificData();
+
+  private static final BinaryMessageEncoder<AvroProduct> ENCODER =
+ new BinaryMessageEncoder<>(MODEL$, SCHEMA$);
+
+  private static final BinaryMessageDecoder<AvroProduct> DECODER =
+ new BinaryMessageDecoder<>(MODEL$, SCHEMA$);
+
+ /**
+ * Return the BinaryMessageEncoder instance used by this class.
+ * @return the message encoder used by this class
+ */
+  public static BinaryMessageEncoder<AvroProduct> getEncoder() {
+ return ENCODER;
+ }
+
+ /**
+ * Return the BinaryMessageDecoder instance used by this class.
+ * @return the message decoder used by this class
+ */
+  public static BinaryMessageDecoder<AvroProduct> getDecoder() {
+ return DECODER;
+ }
+
+ /**
+ * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
+ * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
+ * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
+ */
+  public static BinaryMessageDecoder<AvroProduct> createDecoder(SchemaStore resolver) {
+ return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver);
+ }
+
+ /**
+ * Serializes this AvroProduct to a ByteBuffer.
+ * @return a buffer holding the serialized data for this instance
+ * @throws java.io.IOException if this instance could not be serialized
+ */
+ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
+ return ENCODER.encode(this);
+ }
+
+ /**
+ * Deserializes a AvroProduct from a ByteBuffer.
+ * @param b a byte buffer holding serialized data for an instance of this class
+ * @return a AvroProduct instance decoded from the given buffer
+ * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
+ */
+ public static AvroProduct fromByteBuffer(
+ java.nio.ByteBuffer b) throws java.io.IOException {
+ return DECODER.decode(b);
+ }
+
+ private int id;
+ private java.lang.String name;
+ private double price;
+
+ /**
+ * Default constructor. Note that this does not initialize fields
+ * to their default values from the schema. If that is desired then
+ * one should use newBuilder().
+ */
+ public AvroProduct() {}
+
+ /**
+ * All-args constructor.
+ * @param id The new value for id
+ * @param name The new value for name
+ * @param price The new value for price
+ */
+ public AvroProduct(java.lang.Integer id, java.lang.String name, java.lang.Double price) {
+ this.id = id;
+ this.name = name;
+ this.price = price;
+ }
+
+ @Override
+ public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
+
+ @Override
+ public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+
+ // Used by DatumWriter. Applications should not call.
+ @Override
+ public java.lang.Object get(int field$) {
+ switch (field$) {
+ case 0: return id;
+ case 1: return name;
+ case 2: return price;
+ default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
+ }
+ }
+
+ // Used by DatumReader. Applications should not call.
+ @Override
+ @SuppressWarnings(value="unchecked")
+ public void put(int field$, java.lang.Object value$) {
+ switch (field$) {
+ case 0: id = (java.lang.Integer)value$; break;
+ case 1: name = value$ != null ? value$.toString() : null; break;
+ case 2: price = (java.lang.Double)value$; break;
+ default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
+ }
+ }
+
+ /**
+ * Gets the value of the 'id' field.
+ * @return The value of the 'id' field.
+ */
+ public int getId() {
+ return id;
+ }
+
+
+ /**
+ * Sets the value of the 'id' field.
+ * @param value the value to set.
+ */
+ public void setId(int value) {
+ this.id = value;
+ }
+
+ /**
+ * Gets the value of the 'name' field.
+ * @return The value of the 'name' field.
+ */
+ public java.lang.String getName() {
+ return name;
+ }
+
+
+ /**
+ * Sets the value of the 'name' field.
+ * @param value the value to set.
+ */
+ public void setName(java.lang.String value) {
+ this.name = value;
+ }
+
+ /**
+ * Gets the value of the 'price' field.
+ * @return The value of the 'price' field.
+ */
+ public double getPrice() {
+ return price;
+ }
+
+
+ /**
+ * Sets the value of the 'price' field.
+ * @param value the value to set.
+ */
+ public void setPrice(double value) {
+ this.price = value;
+ }
+
+ /**
+ * Creates a new AvroProduct RecordBuilder.
+ * @return A new AvroProduct RecordBuilder
+ */
+ public static org.demo.kafka.avro.AvroProduct.Builder newBuilder() {
+ return new org.demo.kafka.avro.AvroProduct.Builder();
+ }
+
+ /**
+ * Creates a new AvroProduct RecordBuilder by copying an existing Builder.
+ * @param other The existing builder to copy.
+ * @return A new AvroProduct RecordBuilder
+ */
+ public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct.Builder other) {
+ if (other == null) {
+ return new org.demo.kafka.avro.AvroProduct.Builder();
+ } else {
+ return new org.demo.kafka.avro.AvroProduct.Builder(other);
+ }
+ }
+
+ /**
+ * Creates a new AvroProduct RecordBuilder by copying an existing AvroProduct instance.
+ * @param other The existing instance to copy.
+ * @return A new AvroProduct RecordBuilder
+ */
+ public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct other) {
+ if (other == null) {
+ return new org.demo.kafka.avro.AvroProduct.Builder();
+ } else {
+ return new org.demo.kafka.avro.AvroProduct.Builder(other);
+ }
+ }
+
+ /**
+ * RecordBuilder for AvroProduct instances.
+ */
+ @org.apache.avro.specific.AvroGenerated
+  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<AvroProduct>
+    implements org.apache.avro.data.RecordBuilder<AvroProduct> {
+
+ private int id;
+ private java.lang.String name;
+ private double price;
+
+ /** Creates a new Builder */
+ private Builder() {
+ super(SCHEMA$, MODEL$);
+ }
+
+ /**
+ * Creates a Builder by copying an existing Builder.
+ * @param other The existing Builder to copy.
+ */
+ private Builder(org.demo.kafka.avro.AvroProduct.Builder other) {
+ super(other);
+ if (isValidValue(fields()[0], other.id)) {
+ this.id = data().deepCopy(fields()[0].schema(), other.id);
+ fieldSetFlags()[0] = other.fieldSetFlags()[0];
+ }
+ if (isValidValue(fields()[1], other.name)) {
+ this.name = data().deepCopy(fields()[1].schema(), other.name);
+ fieldSetFlags()[1] = other.fieldSetFlags()[1];
+ }
+ if (isValidValue(fields()[2], other.price)) {
+ this.price = data().deepCopy(fields()[2].schema(), other.price);
+ fieldSetFlags()[2] = other.fieldSetFlags()[2];
+ }
+ }
+
+ /**
+ * Creates a Builder by copying an existing AvroProduct instance
+ * @param other The existing instance to copy.
+ */
+ private Builder(org.demo.kafka.avro.AvroProduct other) {
+ super(SCHEMA$, MODEL$);
+ if (isValidValue(fields()[0], other.id)) {
+ this.id = data().deepCopy(fields()[0].schema(), other.id);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.name)) {
+ this.name = data().deepCopy(fields()[1].schema(), other.name);
+ fieldSetFlags()[1] = true;
+ }
+ if (isValidValue(fields()[2], other.price)) {
+ this.price = data().deepCopy(fields()[2].schema(), other.price);
+ fieldSetFlags()[2] = true;
+ }
+ }
+
+ /**
+ * Gets the value of the 'id' field.
+ * @return The value.
+ */
+ public int getId() {
+ return id;
+ }
+
+
+ /**
+ * Sets the value of the 'id' field.
+ * @param value The value of 'id'.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder setId(int value) {
+ validate(fields()[0], value);
+ this.id = value;
+ fieldSetFlags()[0] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'id' field has been set.
+ * @return True if the 'id' field has been set, false otherwise.
+ */
+ public boolean hasId() {
+ return fieldSetFlags()[0];
+ }
+
+
+ /**
+ * Clears the value of the 'id' field.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder clearId() {
+ fieldSetFlags()[0] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'name' field.
+ * @return The value.
+ */
+ public java.lang.String getName() {
+ return name;
+ }
+
+
+ /**
+ * Sets the value of the 'name' field.
+ * @param value The value of 'name'.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder setName(java.lang.String value) {
+ validate(fields()[1], value);
+ this.name = value;
+ fieldSetFlags()[1] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'name' field has been set.
+ * @return True if the 'name' field has been set, false otherwise.
+ */
+ public boolean hasName() {
+ return fieldSetFlags()[1];
+ }
+
+
+ /**
+ * Clears the value of the 'name' field.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder clearName() {
+ name = null;
+ fieldSetFlags()[1] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'price' field.
+ * @return The value.
+ */
+ public double getPrice() {
+ return price;
+ }
+
+
+ /**
+ * Sets the value of the 'price' field.
+ * @param value The value of 'price'.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder setPrice(double value) {
+ validate(fields()[2], value);
+ this.price = value;
+ fieldSetFlags()[2] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'price' field has been set.
+ * @return True if the 'price' field has been set, false otherwise.
+ */
+ public boolean hasPrice() {
+ return fieldSetFlags()[2];
+ }
+
+
+ /**
+ * Clears the value of the 'price' field.
+ * @return This builder.
+ */
+ public org.demo.kafka.avro.AvroProduct.Builder clearPrice() {
+ fieldSetFlags()[2] = false;
+ return this;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public AvroProduct build() {
+ try {
+ AvroProduct record = new AvroProduct();
+ record.id = fieldSetFlags()[0] ? this.id : (java.lang.Integer) defaultValue(fields()[0]);
+ record.name = fieldSetFlags()[1] ? this.name : (java.lang.String) defaultValue(fields()[1]);
+ record.price = fieldSetFlags()[2] ? this.price : (java.lang.Double) defaultValue(fields()[2]);
+ return record;
+ } catch (org.apache.avro.AvroMissingFieldException e) {
+ throw e;
+ } catch (java.lang.Exception e) {
+ throw new org.apache.avro.AvroRuntimeException(e);
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+  private static final org.apache.avro.io.DatumWriter<AvroProduct>
+    WRITER$ = (org.apache.avro.io.DatumWriter<AvroProduct>)MODEL$.createDatumWriter(SCHEMA$);
+
+ @Override public void writeExternal(java.io.ObjectOutput out)
+ throws java.io.IOException {
+ WRITER$.write(this, SpecificData.getEncoder(out));
+ }
+
+ @SuppressWarnings("unchecked")
+  private static final org.apache.avro.io.DatumReader<AvroProduct>
+    READER$ = (org.apache.avro.io.DatumReader<AvroProduct>)MODEL$.createDatumReader(SCHEMA$);
+
+ @Override public void readExternal(java.io.ObjectInput in)
+ throws java.io.IOException {
+ READER$.read(this, SpecificData.getDecoder(in));
+ }
+
+ @Override protected boolean hasCustomCoders() { return true; }
+
+ @Override public void customEncode(org.apache.avro.io.Encoder out)
+ throws java.io.IOException
+ {
+ out.writeInt(this.id);
+
+ out.writeString(this.name);
+
+ out.writeDouble(this.price);
+
+ }
+
+ @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
+ throws java.io.IOException
+ {
+ org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
+ if (fieldOrder == null) {
+ this.id = in.readInt();
+
+ this.name = in.readString();
+
+ this.price = in.readDouble();
+
+ } else {
+ for (int i = 0; i < 3; i++) {
+ switch (fieldOrder[i].pos()) {
+ case 0:
+ this.id = in.readInt();
+ break;
+
+ case 1:
+ this.name = in.readString();
+ break;
+
+ case 2:
+ this.price = in.readDouble();
+ break;
+
+ default:
+ throw new java.io.IOException("Corrupt ResolvingDecoder.");
+ }
+ }
+ }
+ }
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java
new file mode 100644
index 000000000..6da9113fc
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java
@@ -0,0 +1,636 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// NO CHECKED-IN PROTOBUF GENCODE
+// source: ProtobufProduct.proto
+// Protobuf Java Version: 4.31.0
+
+package org.demo.kafka.protobuf;
+
+/**
+ * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct}
+ */
+@com.google.protobuf.Generated
+public final class ProtobufProduct extends
+ com.google.protobuf.GeneratedMessage implements
+ // @@protoc_insertion_point(message_implements:org.demo.kafka.protobuf.ProtobufProduct)
+ ProtobufProductOrBuilder {
+private static final long serialVersionUID = 0L;
+ static {
+ com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
+ com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC,
+ /* major= */ 4,
+ /* minor= */ 31,
+ /* patch= */ 0,
+ /* suffix= */ "",
+ ProtobufProduct.class.getName());
+ }
+ // Use ProtobufProduct.newBuilder() to construct.
+  private ProtobufProduct(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ }
+ private ProtobufProduct() {
+ name_ = "";
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class);
+ }
+
+ public static final int ID_FIELD_NUMBER = 1;
+ private int id_ = 0;
+ /**
+ * int32 id = 1;
+ * @return The id.
+ */
+ @java.lang.Override
+ public int getId() {
+ return id_;
+ }
+
+ public static final int NAME_FIELD_NUMBER = 2;
+ @SuppressWarnings("serial")
+ private volatile java.lang.Object name_ = "";
+ /**
+ * string name = 2;
+ * @return The name.
+ */
+ @java.lang.Override
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ name_ = s;
+ return s;
+ }
+ }
+ /**
+ * string name = 2;
+ * @return The bytes for name.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int PRICE_FIELD_NUMBER = 3;
+ private double price_ = 0D;
+ /**
+ * double price = 3;
+ * @return The price.
+ */
+ @java.lang.Override
+ public double getPrice() {
+ return price_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (id_ != 0) {
+ output.writeInt32(1, id_);
+ }
+ if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) {
+ com.google.protobuf.GeneratedMessage.writeString(output, 2, name_);
+ }
+ if (java.lang.Double.doubleToRawLongBits(price_) != 0) {
+ output.writeDouble(3, price_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (id_ != 0) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(1, id_);
+ }
+ if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) {
+ size += com.google.protobuf.GeneratedMessage.computeStringSize(2, name_);
+ }
+ if (java.lang.Double.doubleToRawLongBits(price_) != 0) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeDoubleSize(3, price_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.demo.kafka.protobuf.ProtobufProduct)) {
+ return super.equals(obj);
+ }
+ org.demo.kafka.protobuf.ProtobufProduct other = (org.demo.kafka.protobuf.ProtobufProduct) obj;
+
+ if (getId()
+ != other.getId()) return false;
+ if (!getName()
+ .equals(other.getName())) return false;
+ if (java.lang.Double.doubleToLongBits(getPrice())
+ != java.lang.Double.doubleToLongBits(
+ other.getPrice())) return false;
+ if (!getUnknownFields().equals(other.getUnknownFields())) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (37 * hash) + ID_FIELD_NUMBER;
+ hash = (53 * hash) + getId();
+ hash = (37 * hash) + NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getName().hashCode();
+ hash = (37 * hash) + PRICE_FIELD_NUMBER;
+ hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+ java.lang.Double.doubleToLongBits(getPrice()));
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+
+ public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessage
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ @java.lang.Override
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.demo.kafka.protobuf.ProtobufProduct prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:org.demo.kafka.protobuf.ProtobufProduct)
+ org.demo.kafka.protobuf.ProtobufProductOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class);
+ }
+
+ // Construct using org.demo.kafka.protobuf.ProtobufProduct.newBuilder()
+ private Builder() {
+
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+
+ }
+ @java.lang.Override
+ public Builder clear() {
+ super.clear();
+ bitField0_ = 0;
+ id_ = 0;
+ name_ = "";
+ price_ = 0D;
+ return this;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor;
+ }
+
+ @java.lang.Override
+ public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() {
+ return org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance();
+ }
+
+ @java.lang.Override
+ public org.demo.kafka.protobuf.ProtobufProduct build() {
+ org.demo.kafka.protobuf.ProtobufProduct result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ @java.lang.Override
+ public org.demo.kafka.protobuf.ProtobufProduct buildPartial() {
+ org.demo.kafka.protobuf.ProtobufProduct result = new org.demo.kafka.protobuf.ProtobufProduct(this);
+ if (bitField0_ != 0) { buildPartial0(result); }
+ onBuilt();
+ return result;
+ }
+
+ private void buildPartial0(org.demo.kafka.protobuf.ProtobufProduct result) {
+ int from_bitField0_ = bitField0_;
+ if (((from_bitField0_ & 0x00000001) != 0)) {
+ result.id_ = id_;
+ }
+ if (((from_bitField0_ & 0x00000002) != 0)) {
+ result.name_ = name_;
+ }
+ if (((from_bitField0_ & 0x00000004) != 0)) {
+ result.price_ = price_;
+ }
+ }
+
+ @java.lang.Override
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.demo.kafka.protobuf.ProtobufProduct) {
+ return mergeFrom((org.demo.kafka.protobuf.ProtobufProduct)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.demo.kafka.protobuf.ProtobufProduct other) {
+ if (other == org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance()) return this;
+ if (other.getId() != 0) {
+ setId(other.getId());
+ }
+ if (!other.getName().isEmpty()) {
+ name_ = other.name_;
+ bitField0_ |= 0x00000002;
+ onChanged();
+ }
+ if (java.lang.Double.doubleToRawLongBits(other.getPrice()) != 0) {
+ setPrice(other.getPrice());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ onChanged();
+ return this;
+ }
+
+ @java.lang.Override
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ @java.lang.Override
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ case 8: {
+ id_ = input.readInt32();
+ bitField0_ |= 0x00000001;
+ break;
+ } // case 8
+ case 18: {
+ name_ = input.readStringRequireUtf8();
+ bitField0_ |= 0x00000002;
+ break;
+ } // case 18
+ case 25: {
+ price_ = input.readDouble();
+ bitField0_ |= 0x00000004;
+ break;
+ } // case 25
+ default: {
+ if (!super.parseUnknownField(input, extensionRegistry, tag)) {
+ done = true; // was an endgroup tag
+ }
+ break;
+ } // default:
+ } // switch (tag)
+ } // while (!done)
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.unwrapIOException();
+ } finally {
+ onChanged();
+ } // finally
+ return this;
+ }
+ private int bitField0_;
+
+ private int id_ ;
+ /**
+ * int32 id = 1;
+ * @return The id.
+ */
+ @java.lang.Override
+ public int getId() {
+ return id_;
+ }
+ /**
+ * int32 id = 1;
+ * @param value The id to set.
+ * @return This builder for chaining.
+ */
+ public Builder setId(int value) {
+
+ id_ = value;
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return this;
+ }
+ /**
+ * int32 id = 1;
+ * @return This builder for chaining.
+ */
+ public Builder clearId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ id_ = 0;
+ onChanged();
+ return this;
+ }
+
+ private java.lang.Object name_ = "";
+ /**
+ * string name = 2;
+ * @return The name.
+ */
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ name_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * string name = 2;
+ * @return The bytes for name.
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * string name = 2;
+ * @param value The name to set.
+ * @return This builder for chaining.
+ */
+ public Builder setName(
+ java.lang.String value) {
+ if (value == null) { throw new NullPointerException(); }
+ name_ = value;
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return this;
+ }
+ /**
+ * string name = 2;
+ * @return This builder for chaining.
+ */
+ public Builder clearName() {
+ name_ = getDefaultInstance().getName();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ return this;
+ }
+ /**
+ * string name = 2;
+ * @param value The bytes for name to set.
+ * @return This builder for chaining.
+ */
+ public Builder setNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) { throw new NullPointerException(); }
+ checkByteStringIsUtf8(value);
+ name_ = value;
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return this;
+ }
+
+ private double price_ ;
+ /**
+ * double price = 3;
+ * @return The price.
+ */
+ @java.lang.Override
+ public double getPrice() {
+ return price_;
+ }
+ /**
+ * double price = 3;
+ * @param value The price to set.
+ * @return This builder for chaining.
+ */
+ public Builder setPrice(double value) {
+
+ price_ = value;
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return this;
+ }
+ /**
+ * double price = 3;
+ * @return This builder for chaining.
+ */
+ public Builder clearPrice() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ price_ = 0D;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:org.demo.kafka.protobuf.ProtobufProduct)
+ }
+
+ // @@protoc_insertion_point(class_scope:org.demo.kafka.protobuf.ProtobufProduct)
+ private static final org.demo.kafka.protobuf.ProtobufProduct DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.demo.kafka.protobuf.ProtobufProduct();
+ }
+
+ public static org.demo.kafka.protobuf.ProtobufProduct getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ private static final com.google.protobuf.Parser<ProtobufProduct>
+ PARSER = new com.google.protobuf.AbstractParser<ProtobufProduct>() {
+ @java.lang.Override
+ public ProtobufProduct parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ Builder builder = newBuilder();
+ try {
+ builder.mergeFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(builder.buildPartial());
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(e)
+ .setUnfinishedMessage(builder.buildPartial());
+ }
+ return builder.buildPartial();
+ }
+ };
+
+ public static com.google.protobuf.Parser<ProtobufProduct> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ProtobufProduct> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+}
+
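The generated ProtobufProduct class above exposes the usual protoc builder/parser surface (newBuilder, toByteArray, parseFrom). As a minimal sketch of how the message round-trips through that generated API, with illustrative values only:

    import com.google.protobuf.InvalidProtocolBufferException;

    import org.demo.kafka.protobuf.ProtobufProduct;

    public class ProtobufProductRoundTrip {
        public static void main(String[] args) throws InvalidProtocolBufferException {
            // Build a message through the generated builder (values are illustrative)
            ProtobufProduct original = ProtobufProduct.newBuilder()
                    .setId(1001)
                    .setName("Laptop")
                    .setPrice(999.99)
                    .build();

            // Serialize to the protobuf wire format and parse it back
            byte[] wire = original.toByteArray();
            ProtobufProduct parsed = ProtobufProduct.parseFrom(wire);

            // Accessors mirror the int32 id, string name and double price fields
            System.out.println(parsed.getId() + " / " + parsed.getName() + " / " + parsed.getPrice());
        }
    }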
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java
new file mode 100644
index 000000000..9c1518db3
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java
@@ -0,0 +1,36 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// NO CHECKED-IN PROTOBUF GENCODE
+// source: ProtobufProduct.proto
+// Protobuf Java Version: 4.31.0
+
+package org.demo.kafka.protobuf;
+
+@com.google.protobuf.Generated
+public interface ProtobufProductOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:org.demo.kafka.protobuf.ProtobufProduct)
+ com.google.protobuf.MessageOrBuilder {
+
+ /**
+ * int32 id = 1;
+ * @return The id.
+ */
+ int getId();
+
+ /**
+ * string name = 2;
+ * @return The name.
+ */
+ java.lang.String getName();
+ /**
+ * string name = 2;
+ * @return The bytes for name.
+ */
+ com.google.protobuf.ByteString
+ getNameBytes();
+
+ /**
+ * double price = 3;
+ * @return The price.
+ */
+ double getPrice();
+}
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java
new file mode 100644
index 000000000..6a99f35ec
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java
@@ -0,0 +1,63 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// NO CHECKED-IN PROTOBUF GENCODE
+// source: ProtobufProduct.proto
+// Protobuf Java Version: 4.31.0
+
+package org.demo.kafka.protobuf;
+
+@com.google.protobuf.Generated
+public final class ProtobufProductOuterClass {
+ private ProtobufProductOuterClass() {}
+ static {
+ com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
+ com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC,
+ /* major= */ 4,
+ /* minor= */ 31,
+ /* patch= */ 0,
+ /* suffix= */ "",
+ ProtobufProductOuterClass.class.getName());
+ }
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistryLite registry) {
+ }
+
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ registerAllExtensions(
+ (com.google.protobuf.ExtensionRegistryLite) registry);
+ }
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor;
+ static final
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\025ProtobufProduct.proto\022\027org.demo.kafka." +
+ "protobuf\":\n\017ProtobufProduct\022\n\n\002id\030\001 \001(\005\022" +
+ "\014\n\004name\030\002 \001(\t\022\r\n\005price\030\003 \001(\001B6\n\027org.demo" +
+ ".kafka.protobufB\031ProtobufProductOuterCla" +
+ "ssP\001b\006proto3"
+ };
+ descriptor = com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ });
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor,
+ new java.lang.String[] { "Id", "Name", "Price", });
+ descriptor.resolveAllFeaturesImmutable();
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java
new file mode 100644
index 000000000..4bd6ebd13
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java
@@ -0,0 +1,121 @@
+package org.demo.kafka.tools;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Base64;
+
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.demo.kafka.avro.AvroProduct;
+
+/**
+ * Utility class to generate base64-encoded Avro serialized products
+ * for use in test events.
+ */
+public class GenerateAvroSamples {
+
+ public static void main(String[] args) throws IOException {
+ // Create three different products
+ AvroProduct product1 = new AvroProduct(1001, "Laptop", 999.99);
+ AvroProduct product2 = new AvroProduct(1002, "Smartphone", 599.99);
+ AvroProduct product3 = new AvroProduct(1003, "Headphones", 149.99);
+
+ // Serialize and encode each product
+ String encodedProduct1 = serializeAndEncode(product1);
+ String encodedProduct2 = serializeAndEncode(product2);
+ String encodedProduct3 = serializeAndEncode(product3);
+
+ // Serialize and encode an integer key
+ String encodedKey = serializeAndEncodeInteger(42);
+
+ // Print the results
+ System.out.println("Base64 encoded Avro products for use in kafka-avro-event.json:");
+ System.out.println("\nProduct 1 (with key):");
+ System.out.println("key: \"" + encodedKey + "\",");
+ System.out.println("value: \"" + encodedProduct1 + "\",");
+
+ System.out.println("\nProduct 2 (with key):");
+ System.out.println("key: \"" + encodedKey + "\",");
+ System.out.println("value: \"" + encodedProduct2 + "\",");
+
+ System.out.println("\nProduct 3 (without key):");
+ System.out.println("key: null,");
+ System.out.println("value: \"" + encodedProduct3 + "\",");
+
+ // Print a sample event structure
+ System.out.println("\nSample event structure:");
+ printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3);
+ }
+
+ private static String serializeAndEncode(AvroProduct product) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
+ DatumWriter<AvroProduct> writer = new SpecificDatumWriter<>(AvroProduct.class);
+
+ writer.write(product, encoder);
+ encoder.flush();
+
+ return Base64.getEncoder().encodeToString(baos.toByteArray());
+ }
+
+ private static String serializeAndEncodeInteger(Integer value) throws IOException {
+ // For simple types like integers, we'll just convert to string and encode
+ return Base64.getEncoder().encodeToString(value.toString().getBytes());
+ }
+
+ private static void printSampleEvent(String key, String product1, String product2, String product3) {
+ System.out.println("{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n" +
+ " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n" +
+ " \"records\": {\n" +
+ " \"mytopic-0\": [\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"" + key + "\",\n" +
+ " \"value\": \"" + product1 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 16,\n" +
+ " \"timestamp\": 1545084650988,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"" + key + "\",\n" +
+ " \"value\": \"" + product2 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 17,\n" +
+ " \"timestamp\": 1545084650989,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": \"" + product3 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}");
+ }
+}
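GenerateAvroSamples only prints the base64 strings; a small sketch of the reverse direction, decoding one printed value back into an AvroProduct with the matching Avro binary decoder (assuming the same generated AvroProduct class used above), can help sanity-check the output:

    import java.util.Base64;

    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.specific.SpecificDatumReader;
    import org.demo.kafka.avro.AvroProduct;

    public class DecodeAvroSample {
        public static void main(String[] args) throws Exception {
            // Pass one of the printed base64 values as the first argument
            byte[] bytes = Base64.getDecoder().decode(args[0]);

            // Mirror of serializeAndEncode(): specific datum reader + binary decoder
            SpecificDatumReader<AvroProduct> reader = new SpecificDatumReader<>(AvroProduct.class);
            BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
            AvroProduct product = reader.read(null, decoder);

            System.out.println(product);
        }
    }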
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java
new file mode 100644
index 000000000..a4fd6565a
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java
@@ -0,0 +1,126 @@
+package org.demo.kafka.tools;
+
+import java.io.IOException;
+import java.util.Base64;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * Utility class to generate base64-encoded JSON serialized products
+ * for use in test events.
+ */
+public class GenerateJsonSamples {
+
+ public static void main(String[] args) throws IOException {
+ // Create three different products
+ Map<String, Object> product1 = new HashMap<>();
+ product1.put("id", 1001);
+ product1.put("name", "Laptop");
+ product1.put("price", 999.99);
+
+ Map<String, Object> product2 = new HashMap<>();
+ product2.put("id", 1002);
+ product2.put("name", "Smartphone");
+ product2.put("price", 599.99);
+
+ Map<String, Object> product3 = new HashMap<>();
+ product3.put("id", 1003);
+ product3.put("name", "Headphones");
+ product3.put("price", 149.99);
+
+ // Serialize and encode each product
+ String encodedProduct1 = serializeAndEncode(product1);
+ String encodedProduct2 = serializeAndEncode(product2);
+ String encodedProduct3 = serializeAndEncode(product3);
+
+ // Serialize and encode an integer key
+ String encodedKey = serializeAndEncodeInteger(42);
+
+ // Print the results
+ System.out.println("Base64 encoded JSON products for use in kafka-json-event.json:");
+ System.out.println("\nProduct 1 (with key):");
+ System.out.println("key: \"" + encodedKey + "\",");
+ System.out.println("value: \"" + encodedProduct1 + "\",");
+
+ System.out.println("\nProduct 2 (with key):");
+ System.out.println("key: \"" + encodedKey + "\",");
+ System.out.println("value: \"" + encodedProduct2 + "\",");
+
+ System.out.println("\nProduct 3 (without key):");
+ System.out.println("key: null,");
+ System.out.println("value: \"" + encodedProduct3 + "\",");
+
+ // Print a sample event structure
+ System.out.println("\nSample event structure:");
+ printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3);
+ }
+
+ private static String serializeAndEncode(Map<String, Object> product) throws IOException {
+ ObjectMapper mapper = new ObjectMapper();
+ String json = mapper.writeValueAsString(product);
+ return Base64.getEncoder().encodeToString(json.getBytes());
+ }
+
+ private static String serializeAndEncodeInteger(Integer value) {
+ // For simple types like integers, we'll just convert to string and encode
+ return Base64.getEncoder().encodeToString(value.toString().getBytes());
+ }
+
+ private static void printSampleEvent(String key, String product1, String product2, String product3) {
+ System.out.println("{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n"
+ +
+ " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n"
+ +
+ " \"records\": {\n" +
+ " \"mytopic-0\": [\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"" + key + "\",\n" +
+ " \"value\": \"" + product1 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"" + key + "\",\n" +
+ " \"value\": \"" + product2 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": \"" + product3 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}");
+ }
+}
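The JSON variant is the simplest to verify by hand: each printed value is just base64 over a Jackson-serialized map. A minimal decoding sketch, mirroring serializeAndEncode() above:

    import java.util.Base64;
    import java.util.Map;

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class DecodeJsonSample {
        public static void main(String[] args) throws Exception {
            // Pass one of the printed base64 values as the first argument
            byte[] json = Base64.getDecoder().decode(args[0]);

            // Mirror of serializeAndEncode(): plain Jackson deserialization back into a map
            Map<?, ?> product = new ObjectMapper().readValue(json, Map.class);

            System.out.println(product); // e.g. {id=1001, name=Laptop, price=999.99}
        }
    }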
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java
new file mode 100644
index 000000000..ae078a28a
--- /dev/null
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java
@@ -0,0 +1,125 @@
+package org.demo.kafka.tools;
+
+import java.io.IOException;
+import java.util.Base64;
+
+import org.demo.kafka.protobuf.ProtobufProduct;
+
+/**
+ * Utility class to generate base64-encoded Protobuf serialized products
+ * for use in test events.
+ */
+public class GenerateProtobufSamples {
+
+ public static void main(String[] args) throws IOException {
+ // Create three different products
+ ProtobufProduct product1 = ProtobufProduct.newBuilder()
+ .setId(1001)
+ .setName("Laptop")
+ .setPrice(999.99)
+ .build();
+
+ ProtobufProduct product2 = ProtobufProduct.newBuilder()
+ .setId(1002)
+ .setName("Smartphone")
+ .setPrice(599.99)
+ .build();
+
+ ProtobufProduct product3 = ProtobufProduct.newBuilder()
+ .setId(1003)
+ .setName("Headphones")
+ .setPrice(149.99)
+ .build();
+
+ // Serialize and encode each product
+ String encodedProduct1 = serializeAndEncode(product1);
+ String encodedProduct2 = serializeAndEncode(product2);
+ String encodedProduct3 = serializeAndEncode(product3);
+
+ // Serialize and encode an integer key
+ String encodedKey = serializeAndEncodeInteger(42);
+
+ // Print the results
+ System.out.println("Base64 encoded Protobuf products for use in kafka-protobuf-event.json:");
+ System.out.println("\nProduct 1 (with key):");
+ System.out.println("key: \"" + encodedKey + "\",");
+ System.out.println("value: \"" + encodedProduct1 + "\",");
+
+ System.out.println("\nProduct 2 (with key):");
+ System.out.println("key: \"" + encodedKey + "\",");
+ System.out.println("value: \"" + encodedProduct2 + "\",");
+
+ System.out.println("\nProduct 3 (without key):");
+ System.out.println("key: null,");
+ System.out.println("value: \"" + encodedProduct3 + "\",");
+
+ // Print a sample event structure
+ System.out.println("\nSample event structure:");
+ printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3);
+ }
+
+ private static String serializeAndEncode(ProtobufProduct product) {
+ return Base64.getEncoder().encodeToString(product.toByteArray());
+ }
+
+ private static String serializeAndEncodeInteger(Integer value) {
+ // For simple types like integers, we'll just convert to string and encode
+ return Base64.getEncoder().encodeToString(value.toString().getBytes());
+ }
+
+ private static void printSampleEvent(String key, String product1, String product2, String product3) {
+ System.out.println("{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n"
+ +
+ " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n"
+ +
+ " \"records\": {\n" +
+ " \"mytopic-0\": [\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"" + key + "\",\n" +
+ " \"value\": \"" + product1 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 16,\n" +
+ " \"timestamp\": 1545084650988,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"" + key + "\",\n" +
+ " \"value\": \"" + product2 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"topic\": \"mytopic\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 17,\n" +
+ " \"timestamp\": 1545084650989,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": \"" + product3 + "\",\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}");
+ }
+}
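All three generators encode the record key with serializeAndEncodeInteger(), which base64-encodes the decimal string "42" rather than a binary integer. A tiny sketch making that explicit, useful when writing assertions against the generated events:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class DecodeSampleKey {
        public static void main(String[] args) {
            // Same logic as serializeAndEncodeInteger(42): the key is the UTF-8 string "42"
            String encodedKey = Base64.getEncoder()
                    .encodeToString(Integer.toString(42).getBytes(StandardCharsets.UTF_8));

            String decoded = new String(Base64.getDecoder().decode(encodedKey), StandardCharsets.UTF_8);
            System.out.println(encodedKey + " -> " + decoded); // NDI= -> 42
        }
    }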
diff --git a/mkdocs.yml b/mkdocs.yml
index 82a32d49c..07be3c175 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -15,8 +15,9 @@ nav:
- Utilities:
- utilities/idempotency.md
- utilities/parameters.md
- - utilities/large_messages.md
- utilities/batch.md
+ - utilities/kafka.md
+ - utilities/large_messages.md
- utilities/validation.md
- utilities/custom_resources.md
- utilities/serialization.md
@@ -101,8 +102,9 @@ plugins:
Utilities:
- utilities/idempotency.md
- utilities/parameters.md
- - utilities/large_messages.md
- utilities/batch.md
+ - utilities/kafka.md
+ - utilities/large_messages.md
- utilities/validation.md
- utilities/custom_resources.md
- utilities/serialization.md
@@ -115,6 +117,7 @@ extra_css:
extra_javascript:
- javascript/aws-amplify.min.js
- javascript/extra.js
+ - https://docs.powertools.aws.dev/shared/mermaid.min.js
extra:
powertools:
diff --git a/pom.xml b/pom.xml
index 951e155f6..f27ffc497 100644
--- a/pom.xml
+++ b/pom.xml
@@ -56,6 +56,7 @@
powertools-commonpowertools-serialization
+ powertools-kafkapowertools-loggingpowertools-logging/powertools-logging-log4jpowertools-logging/powertools-logging-logback
@@ -113,7 +114,9 @@
1.12.7812.18.01.6.0
- 5.17.0
+ 5.18.0
+ 5.18.0
+ 2.3.0
@@ -355,7 +358,7 @@
org.mockitomockito-junit-jupiter
- ${mockito.version}
+ ${mockito-junit-jupiter.version}test
diff --git a/powertools-e2e-tests/handlers/batch/pom.xml b/powertools-e2e-tests/handlers/batch/pom.xml
index a36d464ea..3b7238b4e 100644
--- a/powertools-e2e-tests/handlers/batch/pom.xml
+++ b/powertools-e2e-tests/handlers/batch/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-batch
diff --git a/powertools-e2e-tests/handlers/idempotency/pom.xml b/powertools-e2e-tests/handlers/idempotency/pom.xml
index e3a67a5b5..dfa97225a 100644
--- a/powertools-e2e-tests/handlers/idempotency/pom.xml
+++ b/powertools-e2e-tests/handlers/idempotency/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-idempotency
diff --git a/powertools-e2e-tests/handlers/largemessage/pom.xml b/powertools-e2e-tests/handlers/largemessage/pom.xml
index 0728404bf..ce3fbbdd5 100644
--- a/powertools-e2e-tests/handlers/largemessage/pom.xml
+++ b/powertools-e2e-tests/handlers/largemessage/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-largemessage
diff --git a/powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml b/powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml
index b57063346..e9e87da2b 100644
--- a/powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml
+++ b/powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-large-msg-idempotent
diff --git a/powertools-e2e-tests/handlers/logging/pom.xml b/powertools-e2e-tests/handlers/logging/pom.xml
index 88feda09b..62f2f7530 100644
--- a/powertools-e2e-tests/handlers/logging/pom.xml
+++ b/powertools-e2e-tests/handlers/logging/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-logging
diff --git a/powertools-e2e-tests/handlers/metrics/pom.xml b/powertools-e2e-tests/handlers/metrics/pom.xml
index 68059e67e..e543c2cd0 100644
--- a/powertools-e2e-tests/handlers/metrics/pom.xml
+++ b/powertools-e2e-tests/handlers/metrics/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-metrics
diff --git a/powertools-e2e-tests/handlers/parameters/pom.xml b/powertools-e2e-tests/handlers/parameters/pom.xml
index 2d6a9a06a..471e79d8f 100644
--- a/powertools-e2e-tests/handlers/parameters/pom.xml
+++ b/powertools-e2e-tests/handlers/parameters/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-parameters
diff --git a/powertools-e2e-tests/handlers/pom.xml b/powertools-e2e-tests/handlers/pom.xml
index b55cf436a..988ae3d55 100644
--- a/powertools-e2e-tests/handlers/pom.xml
+++ b/powertools-e2e-tests/handlers/pom.xml
@@ -4,13 +4,13 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0pomHandlers for End-to-End testsFake handlers that use Powertools for AWS Lambda (Java).
- 2.0.0-SNAPSHOT
+ 2.0.0UTF-81111
diff --git a/powertools-e2e-tests/handlers/tracing/pom.xml b/powertools-e2e-tests/handlers/tracing/pom.xml
index b96fcef0a..b1bc14c05 100644
--- a/powertools-e2e-tests/handlers/tracing/pom.xml
+++ b/powertools-e2e-tests/handlers/tracing/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-tracing
diff --git a/powertools-e2e-tests/handlers/validation-alb-event/pom.xml b/powertools-e2e-tests/handlers/validation-alb-event/pom.xml
index be50094c1..36695b9a4 100644
--- a/powertools-e2e-tests/handlers/validation-alb-event/pom.xml
+++ b/powertools-e2e-tests/handlers/validation-alb-event/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-validation-alb-event
diff --git a/powertools-e2e-tests/handlers/validation-apigw-event/pom.xml b/powertools-e2e-tests/handlers/validation-apigw-event/pom.xml
index f204a8a9f..8bb927778 100644
--- a/powertools-e2e-tests/handlers/validation-apigw-event/pom.xml
+++ b/powertools-e2e-tests/handlers/validation-apigw-event/pom.xml
@@ -5,7 +5,7 @@
software.amazon.lambdae2e-test-handlers-parent
- 2.0.0-SNAPSHOT
+ 2.0.0e2e-test-handler-validation-apigw-event
diff --git a/powertools-e2e-tests/src/test/java/software/amazon/lambda/powertools/testutils/Infrastructure.java b/powertools-e2e-tests/src/test/java/software/amazon/lambda/powertools/testutils/Infrastructure.java
index 143409989..07d816112 100644
--- a/powertools-e2e-tests/src/test/java/software/amazon/lambda/powertools/testutils/Infrastructure.java
+++ b/powertools-e2e-tests/src/test/java/software/amazon/lambda/powertools/testutils/Infrastructure.java
@@ -290,6 +290,7 @@ private Stack createStackWithLambda() {
.queueName(queue)
.visibilityTimeout(Duration.seconds(timeout * 6))
.retentionPeriod(Duration.seconds(timeout * 6))
+ .removalPolicy(RemovalPolicy.DESTROY)
.build();
DeadLetterQueue.builder()
.queue(sqsQueue)
@@ -314,6 +315,7 @@ private Stack createStackWithLambda() {
.create(e2eStack, "KinesisStream")
.streamMode(StreamMode.ON_DEMAND)
.streamName(kinesisStream)
+ .removalPolicy(RemovalPolicy.DESTROY)
.build();
stream.grantRead(function);
diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml
new file mode 100644
index 000000000..f5b80012c
--- /dev/null
+++ b/powertools-kafka/pom.xml
@@ -0,0 +1,223 @@
+
+
+
+
+ 4.0.0
+
+
+ powertools-parent
+ software.amazon.lambda
+ 2.0.0
+
+
+ powertools-kafka
+ jar
+
+ Powertools for AWS Lambda (Java) - Kafka Consumer
+
+
+
+ 4.0.0
+ 1.12.0
+ 4.31.0
+ 1.1.5
+
+
+
+
+ org.slf4j
+ slf4j-api
+
+
+ com.amazonaws
+ aws-lambda-java-core
+
+
+ com.amazonaws
+ aws-lambda-java-events
+
+
+ org.apache.kafka
+ kafka-clients
+ ${kafka-clients.version}
+ provided
+
+
+ org.apache.avro
+ avro
+ ${avro.version}
+
+
+ com.google.protobuf
+ protobuf-java
+ ${protobuf.version}
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+
+
+ com.amazonaws
+ aws-lambda-java-serialization
+ ${lambda-serialization.version}
+
+
+
+
+ org.junit.jupiter
+ junit-jupiter-api
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-engine
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-params
+ test
+
+
+ org.mockito
+ mockito-junit-jupiter
+ test
+
+
+ org.mockito
+ mockito-core
+ test
+
+
+ org.junit-pioneer
+ junit-pioneer
+ test
+
+
+ org.slf4j
+ slf4j-simple
+ test
+
+
+ org.assertj
+ assertj-core
+ test
+
+
+
+
+
+
+ src/main/resources
+
+
+
+
+ src/test/resources
+
+
+
+
+ dev.aspectj
+ aspectj-maven-plugin
+ ${aspectj-maven-plugin.version}
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+
+
+
+ @{argLine}
+ --add-opens java.base/java.util=ALL-UNNAMED
+ --add-opens java.base/java.lang=ALL-UNNAMED
+
+
+
+
+
+ org.apache.avro
+ avro-maven-plugin
+ ${avro.version}
+
+
+ generate-test-sources
+ generate-test-sources
+
+ schema
+
+
+ ${project.basedir}/src/test/avro/
+ ${project.basedir}/target/generated-test-sources/avro/
+ String
+ ${project.basedir}/src/test/avro/
+ ${project.basedir}/target/generated-test-sources/avro/
+
+
+
+
+
+
+ io.github.ascopes
+ protobuf-maven-plugin
+ 3.3.0
+
+
+ generate-test-sources
+
+ generate-test
+
+ generate-test-sources
+
+ ${protobuf.version}
+
+ ${project.basedir}/src/test/proto
+
+ ${project.basedir}/target/generated-test-sources/protobuf
+
+
+
+
+
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+ 3.6.0
+
+
+ add-test-source
+ generate-test-sources
+
+ add-test-source
+
+
+
+ ${project.basedir}/target/generated-test-sources/avro
+ ${project.basedir}/target/generated-test-sources/protobuf
+
+
+
+
+
+
+
+
+
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java
new file mode 100644
index 000000000..4b96c49db
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Annotation to specify the deserialization type for Kafka messages.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.METHOD)
+public @interface Deserialization {
+ /**
+ * The type of deserialization to use.
+ * @return the deserialization type
+ */
+ DeserializationType type();
+}
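A hedged usage sketch for the annotation: a handler declares ConsumerRecords as its input type and selects a deserializer via @Deserialization. The Product POJO below is illustrative only and is not part of this change.

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;

    import com.amazonaws.services.lambda.runtime.Context;
    import com.amazonaws.services.lambda.runtime.RequestHandler;

    import software.amazon.lambda.powertools.kafka.Deserialization;
    import software.amazon.lambda.powertools.kafka.DeserializationType;

    public class ProductHandler implements RequestHandler<ConsumerRecords<String, ProductHandler.Product>, String> {

        // Minimal POJO matching the sample events (id, name, price); illustrative only
        public static class Product {
            public int id;
            public String name;
            public double price;

            @Override
            public String toString() {
                return id + " " + name + " " + price;
            }
        }

        @Override
        @Deserialization(type = DeserializationType.KAFKA_JSON)
        public String handleRequest(ConsumerRecords<String, Product> records, Context context) {
            for (ConsumerRecord<String, Product> record : records) {
                context.getLogger().log("Consumed product: " + record.value());
            }
            return "OK";
        }
    }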
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java
new file mode 100644
index 000000000..a4ac95389
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka;
+
+public enum DeserializationType {
+ LAMBDA_DEFAULT, KAFKA_JSON, KAFKA_AVRO, KAFKA_PROTOBUF
+}
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java
new file mode 100644
index 000000000..be8563b8e
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.lang.reflect.Type;
+import java.util.Map;
+
+import com.amazonaws.services.lambda.runtime.CustomPojoSerializer;
+import com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory;
+
+import software.amazon.lambda.powertools.kafka.internal.DeserializationUtils;
+import software.amazon.lambda.powertools.kafka.serializers.KafkaAvroDeserializer;
+import software.amazon.lambda.powertools.kafka.serializers.KafkaJsonDeserializer;
+import software.amazon.lambda.powertools.kafka.serializers.KafkaProtobufDeserializer;
+import software.amazon.lambda.powertools.kafka.serializers.LambdaDefaultDeserializer;
+import software.amazon.lambda.powertools.kafka.serializers.PowertoolsDeserializer;
+
+/**
+ * Custom Lambda serializer supporting Kafka events. It delegates to the appropriate deserializer based on the
+ * deserialization type specified by the {@link software.amazon.lambda.powertools.kafka.Deserialization} annotation.
+ *
+ * A Kafka deserialization type needs to be requested explicitly through that annotation; otherwise, the default Lambda
+ * serializer from {@link com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory} will be used.
+ */
+public class PowertoolsSerializer implements CustomPojoSerializer {
+ private static final Map<DeserializationType, PowertoolsDeserializer> DESERIALIZERS = Map.of(
+ DeserializationType.KAFKA_JSON, new KafkaJsonDeserializer(),
+ DeserializationType.KAFKA_AVRO, new KafkaAvroDeserializer(),
+ DeserializationType.KAFKA_PROTOBUF, new KafkaProtobufDeserializer(),
+ DeserializationType.LAMBDA_DEFAULT, new LambdaDefaultDeserializer());
+
+ private final PowertoolsDeserializer deserializer;
+
+ public PowertoolsSerializer() {
+ this.deserializer = DESERIALIZERS.getOrDefault(
+ DeserializationUtils.determineDeserializationType(),
+ new LambdaDefaultDeserializer());
+ }
+
+ @Override
+ public <T> T fromJson(InputStream input, Type type) {
+ return deserializer.fromJson(input, type);
+ }
+
+ @Override
+ public <T> T fromJson(String input, Type type) {
+ return deserializer.fromJson(input, type);
+ }
+
+ @Override
+ public <T> void toJson(T value, OutputStream output, Type type) {
+ // This is the Lambda default Output serialization
+ JacksonFactory.getInstance().getSerializer(type).toJson(value, output);
+ }
+}
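PowertoolsSerializer implements CustomPojoSerializer, which the Lambda Java runtime discovers through the standard ServiceLoader mechanism (a META-INF/services entry naming the implementation; the exact packaging detail is assumed here, not shown in this diff). A quick sketch to check that the provider is visible on the classpath:

    import java.util.ServiceLoader;

    import com.amazonaws.services.lambda.runtime.CustomPojoSerializer;

    public class SerializerDiscoveryCheck {
        public static void main(String[] args) {
            // Lists every CustomPojoSerializer registered via
            // META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer;
            // with powertools-kafka on the classpath this should include PowertoolsSerializer.
            for (CustomPojoSerializer serializer : ServiceLoader.load(CustomPojoSerializer.class)) {
                System.out.println("Found serializer: " + serializer.getClass().getName());
            }
        }
    }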
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java
new file mode 100644
index 000000000..1d2fe9aca
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.internal;
+
+import java.lang.reflect.Method;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+
+/**
+ * Utility class to determine the deserialization type from Lambda request handler methods annotated with the
+ * {@link Deserialization} annotation.
+ *
+ * Relies on the Lambda _HANDLER environment variable to detect the currently active handler method.
+ */
+public final class DeserializationUtils {
+ private static final Logger LOGGER = LoggerFactory.getLogger(DeserializationUtils.class);
+
+ private DeserializationUtils() {
+ }
+
+ public static DeserializationType determineDeserializationType() {
+ String handler = System.getenv("_HANDLER");
+ if (handler == null || handler.trim().isEmpty()) {
+ LOGGER.error("Cannot determine deserialization type. No valid handler found in _HANDLER: {}", handler);
+ return DeserializationType.LAMBDA_DEFAULT;
+ }
+
+ try {
+ HandlerInfo handlerInfo = parseHandler(handler);
+ Class<?> handlerClazz = Class.forName(handlerInfo.className);
+
+ if (!RequestHandler.class.isAssignableFrom(handlerClazz)) {
+ LOGGER.warn("Class '{}' does not implement RequestHandler. Ignoring.", handlerInfo.className);
+ return DeserializationType.LAMBDA_DEFAULT;
+ }
+
+ return findDeserializationType(handlerClazz, handlerInfo.methodName);
+ } catch (Exception e) {
+ LOGGER.warn("Cannot determine deserialization type. Defaulting to standard.", e);
+ return DeserializationType.LAMBDA_DEFAULT;
+ }
+ }
+
+ private static HandlerInfo parseHandler(String handler) {
+ if (handler.contains("::")) {
+ int separatorIndex = handler.indexOf("::");
+ String className = handler.substring(0, separatorIndex);
+ String methodName = handler.substring(separatorIndex + 2);
+ return new HandlerInfo(className, methodName);
+ }
+
+ return new HandlerInfo(handler);
+ }
+
+ private static DeserializationType findDeserializationType(Class<?> handlerClass, String methodName) {
+ for (Method method : handlerClass.getDeclaredMethods()) {
+ if (method.getName().equals(methodName) && method.isAnnotationPresent(Deserialization.class)) {
+ Deserialization annotation = method.getAnnotation(Deserialization.class);
+ LOGGER.debug("Found deserialization type: {}", annotation.type());
+ return annotation.type();
+ }
+ }
+
+ return DeserializationType.LAMBDA_DEFAULT;
+ }
+
+ private static class HandlerInfo {
+ final String className;
+ final String methodName;
+
+ HandlerInfo(String className) {
+ this(className, "handleRequest");
+ }
+
+ HandlerInfo(String className, String methodName) {
+ this.className = className;
+ this.methodName = methodName;
+ }
+ }
+}
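parseHandler() above mirrors the two _HANDLER formats Lambda supports: a bare class name (the method defaults to handleRequest) and class::method. A standalone illustration of that split, using a hypothetical handler class name:

    public class HandlerStringExamples {
        public static void main(String[] args) {
            // Bare class name: the method defaults to handleRequest
            print("org.demo.kafka.ProductHandler");
            // Explicit method, matching the class::method form of _HANDLER
            print("org.demo.kafka.ProductHandler::handleRequest");
        }

        private static void print(String handler) {
            int idx = handler.indexOf("::");
            String className = idx >= 0 ? handler.substring(0, idx) : handler;
            String methodName = idx >= 0 ? handler.substring(idx + 2) : "handleRequest";
            System.out.println(className + " -> " + methodName);
        }
    }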
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java
new file mode 100644
index 000000000..8d0fc8f61
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java
@@ -0,0 +1,294 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Base64;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.header.Headers;
+import org.apache.kafka.common.header.internals.RecordHeaders;
+import org.apache.kafka.common.record.TimestampType;
+
+import com.amazonaws.services.lambda.runtime.events.KafkaEvent;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * Abstract base class for Kafka deserializers that implements common functionality.
+ */
+abstract class AbstractKafkaDeserializer implements PowertoolsDeserializer {
+ protected static final ObjectMapper objectMapper = new ObjectMapper()
+ .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+ /**
+ * Deserialize JSON from InputStream into ConsumerRecords
+ *
+ * @param input InputStream containing JSON data
+ * @param type Type representing ConsumerRecords
+ * @param <T> The type to deserialize to
+ * @return Deserialized ConsumerRecords object
+ * @throws IllegalArgumentException if type is not ConsumerRecords
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ public <T> T fromJson(InputStream input, Type type) {
+ if (!isConsumerRecordsType(type)) {
+ throw new IllegalArgumentException("Type must be ConsumerRecords when using this deserializer");
+ }
+
+ try {
+ // Parse the KafkaEvent from the input stream
+ KafkaEvent kafkaEvent = objectMapper.readValue(input, KafkaEvent.class);
+
+ // Extract the key and value types from the ConsumerRecords type
+ ParameterizedType parameterizedType = (ParameterizedType) type;
+ Type[] typeArguments = parameterizedType.getActualTypeArguments();
+ Class<?> keyType = (Class<?>) typeArguments[0];
+ Class<?> valueType = (Class<?>) typeArguments[1];
+
+ // Convert KafkaEvent to ConsumerRecords
+ return (T) convertToConsumerRecords(kafkaEvent, keyType, valueType);
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to deserialize Lambda handler input to ConsumerRecords", e);
+ }
+ }
+
+ /**
+ * Deserialize JSON from String into ConsumerRecords
+ *
+ * @param input String containing JSON data
+ * @param type Type representing ConsumerRecords
+ * @param <T> The type to deserialize to
+ * @return Deserialized ConsumerRecords object
+ * @throws IllegalArgumentException if type is not ConsumerRecords
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ public <T> T fromJson(String input, Type type) {
+ if (!isConsumerRecordsType(type)) {
+ throw new IllegalArgumentException("Type must be ConsumerRecords when using this deserializer");
+ }
+
+ try {
+ // Parse the KafkaEvent from the input string
+ KafkaEvent kafkaEvent = objectMapper.readValue(input, KafkaEvent.class);
+
+ // Extract the key and value types from the ConsumerRecords type
+ ParameterizedType parameterizedType = (ParameterizedType) type;
+ Type[] typeArguments = parameterizedType.getActualTypeArguments();
+ Class<?> keyType = (Class<?>) typeArguments[0];
+ Class<?> valueType = (Class<?>) typeArguments[1];
+
+ // Convert KafkaEvent to ConsumerRecords
+ return (T) convertToConsumerRecords(kafkaEvent, keyType, valueType);
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to deserialize Lambda handler input to ConsumerRecords", e);
+ }
+ }
+
+ private boolean isConsumerRecordsType(Type type) {
+ if (!(type instanceof ParameterizedType)) {
+ return false;
+ }
+
+ ParameterizedType parameterizedType = (ParameterizedType) type;
+ return parameterizedType.getRawType().equals(ConsumerRecords.class);
+ }
+
+ private <K, V> ConsumerRecords<K, V> convertToConsumerRecords(KafkaEvent kafkaEvent, Class<K> keyType,
+ Class<V> valueType) {
+ // Validate that this is actually a Kafka event by checking for required properties
+ if (kafkaEvent == null || kafkaEvent.getEventSource() == null) {
+ throw new RuntimeException(
+ "Failed to deserialize Lambda handler input to ConsumerRecords: Input is not a valid Kafka event.");
+ }
+
+ if (kafkaEvent.getRecords() == null) {
+ return ConsumerRecords.empty();
+ }
+
+ Map<TopicPartition, List<ConsumerRecord<K, V>>> recordsMap = new HashMap<>();
+
+ for (Map.Entry<String, List<KafkaEvent.KafkaEventRecord>> entry : kafkaEvent.getRecords().entrySet()) {
+ String topic = entry.getKey();
+
+ for (KafkaEvent.KafkaEventRecord eventRecord : entry.getValue()) {
+ ConsumerRecord<K, V> consumerRecord = convertToConsumerRecord(topic, eventRecord, keyType, valueType);
+
+ TopicPartition topicPartition = new TopicPartition(topic, eventRecord.getPartition());
+ recordsMap.computeIfAbsent(topicPartition, k -> new ArrayList<>()).add(consumerRecord);
+ }
+ }
+
+ return createConsumerRecords(recordsMap);
+ }
+
+ /**
+ * Creates ConsumerRecords with compatibility for both Kafka 3.x.x and 4.x.x.
+ *
+ * @param <K> Key type
+ * @param <V> Value type
+ * @param records Map of records by topic partition
+ * @return ConsumerRecords instance
+ */
+ protected <K, V> ConsumerRecords<K, V> createConsumerRecords(
+ Map<TopicPartition, List<ConsumerRecord<K, V>>> records) {
+ try {
+ // Try to use the Kafka 4.x.x constructor with nextOffsets parameter
+ return new ConsumerRecords<>(records, Map.of());
+ } catch (NoSuchMethodError e) {
+ // Fall back to Kafka 3.x.x constructor if 4.x.x is not available
+ return new ConsumerRecords<>(records);
+ }
+ }
+
+ private <K, V> ConsumerRecord<K, V> convertToConsumerRecord(
+ String topic,
+ KafkaEvent.KafkaEventRecord eventRecord,
+ Class<K> keyType,
+ Class<V> valueType) {
+
+ K key = deserializeField(eventRecord.getKey(), keyType, "key");
+ V value = deserializeField(eventRecord.getValue(), valueType, "value");
+ Headers headers = extractHeaders(eventRecord);
+
+ return new ConsumerRecord<>(
+ topic,
+ eventRecord.getPartition(),
+ eventRecord.getOffset(),
+ eventRecord.getTimestamp(),
+ TimestampType.valueOf(eventRecord.getTimestampType()),
+ // We set these to NULL_SIZE since they are not relevant in the Lambda environment due to ESM
+ // pre-processing.
+ ConsumerRecord.NULL_SIZE,
+ ConsumerRecord.NULL_SIZE,
+ key,
+ value,
+ headers,
+ Optional.empty());
+ }
+
+ private <T> T deserializeField(String encodedData, Class<T> type, String fieldName) {
+ if (encodedData == null) {
+ return null;
+ }
+
+ try {
+ byte[] decodedBytes = Base64.getDecoder().decode(encodedData);
+ return deserialize(decodedBytes, type);
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to deserialize Kafka record " + fieldName + ".", e);
+ }
+ }
+
+ private Headers extractHeaders(KafkaEvent.KafkaEventRecord eventRecord) {
+ Headers headers = new RecordHeaders();
+ if (eventRecord.getHeaders() != null) {
+ for (Map<String, byte[]> headerMap : eventRecord.getHeaders()) {
+ for (Map.Entry<String, byte[]> header : headerMap.entrySet()) {
+ if (header.getValue() != null) {
+ headers.add(header.getKey(), header.getValue());
+ }
+ }
+ }
+ }
+
+ return headers;
+ }
+
+ /**
+ * Template method to be implemented by subclasses for specific deserialization logic
+ * for complex types (non-primitives).
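+ *
+ * <p>A minimal illustrative implementation (a sketch that reuses the shared Jackson {@code objectMapper}
+ * of this base class, similar to the JSON deserializer shipped in this module) could look like:
+ *
+ * <pre>{@code
+ * protected <T> T deserializeObject(byte[] data, Class<T> type) throws IOException {
+ *     // Illustrative only: delegate complex types to Jackson
+ *     return objectMapper.readValue(data, type);
+ * }
+ * }</pre>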
+ *
+ * @param <T> The type to deserialize to
+ * @param data The byte array to deserialize coming from the base64 decoded Kafka field
+ * @param type The class type to deserialize to
+ * @return The deserialized object
+ * @throws IOException If deserialization fails
+ */
+ protected abstract <T> T deserializeObject(byte[] data, Class<T> type) throws IOException;
+
+ /**
+ * Main deserialize method that handles primitive types and delegates to subclasses for complex types.
+ *
+ * @param <T> The type to deserialize to
+ * @param data The byte array to deserialize
+ * @param type The class type to deserialize to
+ * @return The deserialized object
+ * @throws IOException If deserialization fails
+ */
+ private <T> T deserialize(byte[] data, Class<T> type) throws IOException {
+ // First try to deserialize as a primitive type
+ T result = deserializePrimitive(data, type);
+ if (result != null) {
+ return result;
+ }
+
+ // Delegate to subclass for complex type deserialization
+ return deserializeObject(data, type);
+ }
+
+ /**
+ * Helper method for handling primitive types and String deserialization.
+ *
+ * @param <T> The type to deserialize to
+ * @param data The byte array to deserialize
+ * @param type The class type to deserialize to
+ * @return The deserialized primitive or String, or null if not a primitive or String
+ */
+ @SuppressWarnings("unchecked")
+ private <T> T deserializePrimitive(byte[] data, Class<T> type) {
+ // Handle String type
+ if (type == String.class) {
+ return (T) new String(data, StandardCharsets.UTF_8);
+ }
+
+ // Handle primitive types and their wrappers
+ String str = new String(data, StandardCharsets.UTF_8);
+
+ if (type == Integer.class || type == int.class) {
+ return (T) Integer.valueOf(str);
+ } else if (type == Long.class || type == long.class) {
+ return (T) Long.valueOf(str);
+ } else if (type == Double.class || type == double.class) {
+ return (T) Double.valueOf(str);
+ } else if (type == Float.class || type == float.class) {
+ return (T) Float.valueOf(str);
+ } else if (type == Boolean.class || type == boolean.class) {
+ return (T) Boolean.valueOf(str);
+ } else if (type == Byte.class || type == byte.class) {
+ return (T) Byte.valueOf(str);
+ } else if (type == Short.class || type == short.class) {
+ return (T) Short.valueOf(str);
+ } else if (type == Character.class || type == char.class) {
+ if (!str.isEmpty()) {
+ return (T) Character.valueOf(str.charAt(0));
+ }
+ throw new IllegalArgumentException("Cannot convert empty string to char");
+ }
+
+ return null;
+ }
+}
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java
new file mode 100644
index 000000000..ddf09d4ff
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import java.io.IOException;
+
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificRecordBase;
+
+/**
+ * Deserializer for Kafka records using Avro format.
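+ *
+ * <p>Intended for handlers annotated with {@code @Deserialization(type = DeserializationType.KAFKA_AVRO)}.
+ * The value type of the handler's {@code ConsumerRecords} parameter must be an Avro-generated class
+ * (a subclass of {@code SpecificRecordBase}); other types are rejected with an {@code IOException}.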
+ */
+public class KafkaAvroDeserializer extends AbstractKafkaDeserializer {
+
+ @Override
+ protected <T> T deserializeObject(byte[] data, Class<T> type) throws IOException {
+ // If no Avro-generated class is passed, we cannot deserialize using Avro
+ if (SpecificRecordBase.class.isAssignableFrom(type)) {
+ try {
+ DatumReader<T> datumReader = new SpecificDatumReader<>(type);
+ Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
+
+ return datumReader.read(null, decoder);
+ } catch (Exception e) {
+ throw new IOException("Failed to deserialize Avro data.", e);
+ }
+ } else {
+ throw new IOException("Unsupported type for Avro deserialization: " + type.getName() + ". "
+ + "Avro deserialization requires a type of org.apache.avro.specific.SpecificRecord. "
+ + "Consider using an alternative Deserializer.");
+ }
+ }
+}
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java
new file mode 100644
index 000000000..ed64f3786
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Deserializer for Kafka records using JSON format.
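+ *
+ * <p>Illustrative usage sketch only: the handler below and its {@code Product} POJO are assumptions made
+ * for documentation purposes, not types shipped with this module.
+ *
+ * <pre>{@code
+ * public class JsonKafkaHandler implements RequestHandler<ConsumerRecords<String, Product>, String> {
+ *
+ *     @Override
+ *     @Deserialization(type = DeserializationType.KAFKA_JSON)
+ *     public String handleRequest(ConsumerRecords<String, Product> records, Context context) {
+ *         records.forEach(r -> context.getLogger().log(r.value().toString()));
+ *         return "OK";
+ *     }
+ * }
+ * }</pre>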
+ */
+public class KafkaJsonDeserializer extends AbstractKafkaDeserializer {
+
+ @Override
+ protected <T> T deserializeObject(byte[] data, Class<T> type) throws IOException {
+ String decodedStr = new String(data, StandardCharsets.UTF_8);
+
+ return objectMapper.readValue(decodedStr, type);
+ }
+}
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java
new file mode 100644
index 000000000..025f203c4
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import java.io.IOException;
+import com.google.protobuf.Message;
+import com.google.protobuf.Parser;
+
+/**
+ * Deserializer for Kafka records using Protocol Buffers format.
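+ *
+ * <p>Intended for handlers annotated with {@code @Deserialization(type = DeserializationType.KAFKA_PROTOBUF)}.
+ * The value type of the handler's {@code ConsumerRecords} parameter must be a Protobuf-generated class
+ * (a subclass of {@code com.google.protobuf.Message}); other types are rejected with an {@code IOException}.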
+ */
+public class KafkaProtobufDeserializer extends AbstractKafkaDeserializer {
+
+ @Override
+ @SuppressWarnings("unchecked")
+ protected <T> T deserializeObject(byte[] data, Class<T> type) throws IOException {
+ // If no Protobuf-generated class is passed, we cannot deserialize using Protobuf
+ if (Message.class.isAssignableFrom(type)) {
+ try {
+ // Get the parser from the generated Protobuf class
+ Parser<? extends Message> parser = (Parser<? extends Message>) type.getMethod("parser").invoke(null);
+ Message message = parser.parseFrom(data);
+ return type.cast(message);
+ } catch (Exception e) {
+ throw new IOException("Failed to deserialize Protobuf data.", e);
+ }
+ } else {
+ throw new IOException("Unsupported type for Protobuf deserialization: " + type.getName() + ". "
+ + "Protobuf deserialization requires a type of com.google.protobuf.Message. "
+ + "Consider using an alternative Deserializer.");
+ }
+ }
+}
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java
new file mode 100644
index 000000000..a7ea15d2f
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.Type;
+import java.nio.charset.StandardCharsets;
+
+import com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory;
+
+/**
+ * Default deserializer for Kafka events proxying to Lambda default behavior.
+ *
+ * This deserializer delegates to the default Jackson-based serializer from
+ * {@link com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory}.
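+ *
+ * <p>Illustrative sketch only ({@code Product} is an assumed POJO, not part of this module):
+ *
+ * <pre>{@code
+ * Product product = new LambdaDefaultDeserializer().fromJson("{\"id\": 1}", Product.class);
+ * }</pre>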
+ */
+public class LambdaDefaultDeserializer implements PowertoolsDeserializer {
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public <T> T fromJson(InputStream input, Type type) {
+ // If the target type does not require conversion, simply return the value itself
+ if (type.equals(InputStream.class)) {
+ return (T) input;
+ }
+
+ // If the target type is String, read the input stream as a String
+ if (type.equals(String.class)) {
+ try {
+ return (T) new String(input.readAllBytes(), StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to read input stream as String", e);
+ }
+ }
+
+ return (T) JacksonFactory.getInstance().getSerializer(type).fromJson(input);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public <T> T fromJson(String input, Type type) {
+ // If the target type does not require conversion, simply return the value itself
+ if (type.equals(String.class)) {
+ return (T) input;
+ }
+
+ // If the target type is InputStream, return the String content as its UTF-8 bytes
+ if (type.equals(InputStream.class)) {
+ return (T) input.getBytes(StandardCharsets.UTF_8);
+ }
+
+ return (T) JacksonFactory.getInstance().getSerializer(type).fromJson(input);
+ }
+}
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java
new file mode 100644
index 000000000..1ac0ca0ba
--- /dev/null
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import java.io.InputStream;
+import java.lang.reflect.Type;
+
+/**
+ * Interface for deserializers that can handle both String and InputStream inputs.
+ *
+ * Similar to {@link com.amazonaws.services.lambda.runtime.CustomPojoSerializer} but only for input deserialization.
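+ *
+ * <p>A minimal illustrative sketch ({@code MyEvent} is an assumed POJO, not part of this module):
+ *
+ * <pre>{@code
+ * PowertoolsDeserializer deserializer = new LambdaDefaultDeserializer();
+ * MyEvent event = deserializer.fromJson("{\"id\": 1}", MyEvent.class);
+ * }</pre>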
+ */
+public interface PowertoolsDeserializer {
+ <T> T fromJson(InputStream input, Type type);
+
+ <T> T fromJson(String input, Type type);
+}
diff --git a/powertools-kafka/src/main/resources/META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer b/powertools-kafka/src/main/resources/META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer
new file mode 100644
index 000000000..abc84b035
--- /dev/null
+++ b/powertools-kafka/src/main/resources/META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer
@@ -0,0 +1 @@
+software.amazon.lambda.powertools.kafka.PowertoolsSerializer
diff --git a/powertools-kafka/src/test/avro/TestProduct.avsc b/powertools-kafka/src/test/avro/TestProduct.avsc
new file mode 100644
index 000000000..aad903d40
--- /dev/null
+++ b/powertools-kafka/src/test/avro/TestProduct.avsc
@@ -0,0 +1,10 @@
+{
+ "namespace": "software.amazon.lambda.powertools.kafka.serializers.test.avro",
+ "type": "record",
+ "name": "TestProduct",
+ "fields": [
+ {"name": "id", "type": "int"},
+ {"name": "name", "type": "string"},
+ {"name": "price", "type": "double"}
+ ]
+}
\ No newline at end of file
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTest.java
new file mode 100644
index 000000000..964498d99
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTest.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.lang.reflect.Method;
+
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.junit.jupiter.api.Test;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+class DeserializationTest {
+
+ @Test
+ void shouldHaveCorrectAnnotationRetention() {
+ // Given
+ Class<Deserialization> annotationClass = Deserialization.class;
+
+ // When/Then
+ assertThat(annotationClass.isAnnotation()).isTrue();
+ assertThat(annotationClass.getAnnotation(java.lang.annotation.Retention.class).value())
+ .isEqualTo(java.lang.annotation.RetentionPolicy.RUNTIME);
+ assertThat(annotationClass.getAnnotation(java.lang.annotation.Target.class).value())
+ .contains(java.lang.annotation.ElementType.METHOD);
+ }
+
+ @Test
+ void shouldHaveTypeMethod() throws NoSuchMethodException {
+ // Given
+ Class<Deserialization> annotationClass = Deserialization.class;
+
+ // When
+ java.lang.reflect.Method typeMethod = annotationClass.getMethod("type");
+
+ // Then
+ assertThat(typeMethod.getReturnType()).isEqualTo(DeserializationType.class);
+ }
+
+ @Test
+ void shouldBeAccessibleReflectivelyAtRuntime() throws NoSuchMethodException, SecurityException {
+ // Given
+ class TestHandler implements RequestHandler<ConsumerRecords<String, String>, String> {
+ @Override
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<String, String> input, Context context) {
+ return "OK";
+ }
+ }
+
+ // When
+ Method handleRequestMethod = TestHandler.class.getMethod("handleRequest", ConsumerRecords.class, Context.class);
+
+ // Then
+ Deserialization annotation = handleRequestMethod.getAnnotation(Deserialization.class);
+ assertThat(annotation).isNotNull();
+ assertThat(annotation.type()).isEqualTo(DeserializationType.KAFKA_JSON);
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTypeTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTypeTest.java
new file mode 100644
index 000000000..6999b66d4
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTypeTest.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.junit.jupiter.api.Test;
+
+// Mainly present to remind us to write unit tests once we add support for a new Deserializer. If we add a new type in
+// the enum it will fail this test.
+class DeserializationTypeTest {
+
+ @Test
+ void shouldHaveExpectedEnumValues() {
+ // Given/When
+ DeserializationType[] values = DeserializationType.values();
+
+ // Then
+ assertThat(values).contains(
+ DeserializationType.LAMBDA_DEFAULT,
+ DeserializationType.KAFKA_JSON,
+ DeserializationType.KAFKA_AVRO,
+ DeserializationType.KAFKA_PROTOBUF);
+ }
+
+ @Test
+ void shouldBeAbleToValueOf() {
+ // Given/When
+ DeserializationType jsonType = DeserializationType.valueOf("KAFKA_JSON");
+ DeserializationType avroType = DeserializationType.valueOf("KAFKA_AVRO");
+ DeserializationType protobufType = DeserializationType.valueOf("KAFKA_PROTOBUF");
+ DeserializationType defaultType = DeserializationType.valueOf("LAMBDA_DEFAULT");
+
+ // Then
+ assertThat(jsonType).isEqualTo(DeserializationType.KAFKA_JSON);
+ assertThat(avroType).isEqualTo(DeserializationType.KAFKA_AVRO);
+ assertThat(protobufType).isEqualTo(DeserializationType.KAFKA_PROTOBUF);
+ assertThat(defaultType).isEqualTo(DeserializationType.LAMBDA_DEFAULT);
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java
new file mode 100644
index 000000000..6ce57ecd5
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java
@@ -0,0 +1,417 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static software.amazon.lambda.powertools.kafka.testutils.TestUtils.createConsumerRecordsType;
+import static software.amazon.lambda.powertools.kafka.testutils.TestUtils.serializeAvro;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.Type;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.List;
+import java.util.stream.Stream;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.TopicPartition;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.junitpioneer.jupiter.SetEnvironmentVariable;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import software.amazon.lambda.powertools.kafka.serializers.LambdaDefaultDeserializer;
+import software.amazon.lambda.powertools.kafka.serializers.PowertoolsDeserializer;
+import software.amazon.lambda.powertools.kafka.testutils.TestProductPojo;
+
+// This is testing the whole serializer end-to-end. More detailed serializer tests are placed in serializers folder.
+@ExtendWith(MockitoExtension.class)
+class PowertoolsSerializerTest {
+
+ @Mock
+ private PowertoolsDeserializer mockDeserializer;
+
+ private static final ObjectMapper objectMapper = new ObjectMapper();
+
+ // CustomPojoSerializer has fromJson(String input, ...) and fromJson(InputStream input, ...). We want to test both.
+ static Stream<InputType> inputTypes() {
+ return Stream.of(InputType.INPUT_STREAM, InputType.STRING);
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ @SetEnvironmentVariable(key = "_HANDLER", value = "")
+ void shouldUseDefaultDeserializerWhenHandlerNotFound(InputType inputType) throws JsonProcessingException {
+ // When
+ PowertoolsSerializer serializer = new PowertoolsSerializer();
+
+ // Then
+ TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2"));
+ String json = objectMapper.writeValueAsString(product);
+
+ // This will use the Lambda default deserializer (no Kafka logic)
+ TestProductPojo result;
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream input = new ByteArrayInputStream(json.getBytes());
+ result = serializer.fromJson(input, TestProductPojo.class);
+ } else {
+ result = serializer.fromJson(json, TestProductPojo.class);
+ }
+
+ assertThat(result.getId()).isEqualTo(123);
+ assertThat(result.getName()).isEqualTo("Test Product");
+ assertThat(result.getPrice()).isEqualTo(99.99);
+ assertThat(result.getTags()).containsExactly("tag1", "tag2");
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.DefaultHandler::handleRequest")
+ void shouldUseLambdaDefaultDeserializer(InputType inputType) throws JsonProcessingException {
+ // When
+ PowertoolsSerializer serializer = new PowertoolsSerializer();
+
+ // Then
+ TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2"));
+ String json = objectMapper.writeValueAsString(product);
+
+ // This will use the Lambda default deserializer (no Kafka logic)
+ TestProductPojo result;
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream input = new ByteArrayInputStream(json.getBytes());
+ result = serializer.fromJson(input, TestProductPojo.class);
+ } else {
+ result = serializer.fromJson(json, TestProductPojo.class);
+ }
+
+ assertThat(result.getId()).isEqualTo(123);
+ assertThat(result.getName()).isEqualTo("Test Product");
+ assertThat(result.getPrice()).isEqualTo(99.99);
+ assertThat(result.getTags()).containsExactly("tag1", "tag2");
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.StringHandler::handleRequest")
+ void shouldHandleStringInputType() {
+ // When
+ PowertoolsSerializer serializer = new PowertoolsSerializer();
+
+ // Then
+ String testInput = "This is a test string";
+
+ // This should directly return the input string
+ String result = serializer.fromJson(testInput, String.class);
+
+ assertThat(result).isEqualTo(testInput);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.InputStreamHandler::handleRequest")
+ void shouldHandleInputStreamType() throws IOException {
+ // When
+ PowertoolsSerializer serializer = new PowertoolsSerializer();
+
+ // Then
+ String testInput = "This is a test string";
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(testInput.getBytes());
+
+ // This should return the input stream directly
+ InputStream result = serializer.fromJson(inputStream, InputStream.class);
+
+ // Read the content to verify it's the same
+ String resultString = new String(result.readAllBytes());
+ assertThat(resultString).isEqualTo(testInput);
+ }
+
+ @Test
+ void shouldConvertInputStreamToString() {
+ // When
+ LambdaDefaultDeserializer deserializer = new LambdaDefaultDeserializer();
+
+ // Then
+ String expected = "This is a test string";
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(expected.getBytes());
+
+ // Convert InputStream to String
+ String result = deserializer.fromJson(inputStream, String.class);
+
+ // Verify the result
+ assertThat(result).isEqualTo(expected);
+ }
+
+ @Test
+ void shouldThrowRuntimeExceptionWhenInputStreamIsInvalid() {
+ // When
+ LambdaDefaultDeserializer deserializer = new LambdaDefaultDeserializer();
+
+ // Create a problematic InputStream that throws IOException when read
+ InputStream problematicStream = new InputStream() {
+ @Override
+ public int read() throws IOException {
+ throw new IOException("Simulated IO error");
+ }
+
+ @Override
+ public byte[] readAllBytes() throws IOException {
+ throw new IOException("Simulated IO error");
+ }
+ };
+
+ // Then
+ assertThatThrownBy(() -> deserializer.fromJson(problematicStream, String.class))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to read input stream as String");
+ }
+
+ @Test
+ void shouldConvertStringToByteArray() {
+ // When
+ LambdaDefaultDeserializer deserializer = new LambdaDefaultDeserializer();
+
+ // Then
+ String input = "This is a test string";
+
+ // Request InputStream as target type; the deserializer returns the String's bytes
+ byte[] result = deserializer.fromJson(input, InputStream.class);
+
+ // Verify the result
+ String resultString = new String(result);
+ assertThat(resultString).isEqualTo(input);
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.JsonHandler::handleRequest")
+ void shouldUseKafkaJsonDeserializer(InputType inputType) throws JsonProcessingException {
+ // When
+ PowertoolsSerializer serializer = new PowertoolsSerializer();
+
+ // Create a TestProductPojo and serialize it
+ TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2"));
+ String productJson = objectMapper.writeValueAsString(product);
+ String base64Value = Base64.getEncoder().encodeToString(productJson.getBytes());
+
+ // Then
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": \"" + base64Value + "\",\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+
+ Type type = createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // This should use the KafkaJsonDeserializer
+ ConsumerRecords<String, TestProductPojo> records;
+
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream input = new ByteArrayInputStream(kafkaJson.getBytes());
+ records = serializer.fromJson(input, type);
+ } else {
+ records = serializer.fromJson(kafkaJson, type);
+ }
+
+ // Verify we got a valid ConsumerRecords object
+ assertThat(records).isNotNull();
+
+ // Get the record and verify its content
+ TopicPartition tp = new TopicPartition("test-topic-1", 0);
+ List<ConsumerRecord<String, TestProductPojo>> topicRecords = records.records(tp);
+ assertThat(topicRecords).hasSize(1);
+
+ ConsumerRecord<String, TestProductPojo> consumerRecord = topicRecords.get(0);
+ TestProductPojo deserializedProduct = consumerRecord.value();
+
+ assertThat(deserializedProduct.getId()).isEqualTo(123);
+ assertThat(deserializedProduct.getName()).isEqualTo("Test Product");
+ assertThat(deserializedProduct.getPrice()).isEqualTo(99.99);
+ assertThat(deserializedProduct.getTags()).containsExactly("tag1", "tag2");
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.AvroHandler::handleRequest")
+ void shouldUseKafkaAvroDeserializer(InputType inputType) throws IOException {
+ // When
+ PowertoolsSerializer serializer = new PowertoolsSerializer();
+
+ // Create an Avro TestProduct and serialize it
+ software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct product = new software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct(
+ 123, "Test Product", 99.99);
+ String base64Value = Base64.getEncoder().encodeToString(serializeAvro(product));
+
+ // Then
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": \"" + base64Value + "\",\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+
+ Type type = createConsumerRecordsType(String.class,
+ software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct.class);
+
+ // This should use the KafkaAvroDeserializer
+ ConsumerRecords<String, software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct> records;
+
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream input = new ByteArrayInputStream(kafkaJson.getBytes());
+ records = serializer.fromJson(input, type);
+ } else {
+ records = serializer.fromJson(kafkaJson, type);
+ }
+
+ // Verify we got a valid ConsumerRecords object
+ assertThat(records).isNotNull();
+
+ // Get the record and verify its content
+ TopicPartition tp = new TopicPartition("test-topic-1", 0);
+ List<ConsumerRecord<String, software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct>> topicRecords = records
+ .records(tp);
+ assertThat(topicRecords).hasSize(1);
+
+ ConsumerRecord<String, software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct> consumerRecord = topicRecords
+ .get(0);
+ software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct deserializedProduct = consumerRecord
+ .value();
+
+ assertThat(deserializedProduct.getId()).isEqualTo(123);
+ assertThat(deserializedProduct.getName()).isEqualTo("Test Product");
+ assertThat(deserializedProduct.getPrice()).isEqualTo(99.99);
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.ProtobufHandler::handleRequest")
+ void shouldUseKafkaProtobufDeserializer(InputType inputType) {
+ // When
+ PowertoolsSerializer serializer = new PowertoolsSerializer();
+
+ // Create a Protobuf TestProduct and serialize it
+ software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct product = software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct
+ .newBuilder()
+ .setId(123)
+ .setName("Test Product")
+ .setPrice(99.99)
+ .build();
+ String base64Value = Base64.getEncoder().encodeToString(product.toByteArray());
+
+ // Then
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": \"" + base64Value + "\",\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+
+ Type type = createConsumerRecordsType(String.class,
+ software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct.class);
+
+ // This should use the KafkaProtobufDeserializer
+ ConsumerRecords<String, software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct> records;
+
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream input = new ByteArrayInputStream(kafkaJson.getBytes());
+ records = serializer.fromJson(input, type);
+ } else {
+ records = serializer.fromJson(kafkaJson, type);
+ }
+
+ // Verify we got a valid ConsumerRecords object
+ assertThat(records).isNotNull();
+
+ // Get the record and verify its content
+ TopicPartition tp = new TopicPartition("test-topic-1", 0);
+ List<ConsumerRecord<String, software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct>> topicRecords = records
+ .records(tp);
+ assertThat(topicRecords).hasSize(1);
+
+ ConsumerRecord<String, software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct> consumerRecord = topicRecords
+ .get(0);
+ software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct deserializedProduct = consumerRecord
+ .value();
+
+ assertThat(deserializedProduct.getId()).isEqualTo(123);
+ assertThat(deserializedProduct.getName()).isEqualTo("Test Product");
+ assertThat(deserializedProduct.getPrice()).isEqualTo(99.99);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "")
+ void shouldDelegateToJsonOutput() {
+ // Given
+ PowertoolsSerializer serializer = new PowertoolsSerializer();
+
+ // When
+ TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2"));
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+
+ // Then
+ serializer.toJson(product, output, TestProductPojo.class);
+ String json = output.toString();
+
+ // Verify the output is valid JSON
+ assertThat(json).contains("\"id\":123")
+ .contains("\"name\":\"Test Product\"")
+ .contains("\"price\":99.99")
+ .contains("\"tags\":[\"tag1\",\"tag2\"]");
+ }
+
+ private enum InputType {
+ INPUT_STREAM, STRING
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java
new file mode 100644
index 000000000..21f38d9ab
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.internal;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.junit.jupiter.api.Test;
+import org.junitpioneer.jupiter.SetEnvironmentVariable;
+
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+
+class DeserializationUtilsTest {
+
+ // NOTE: We don't use a parameterized test here because this is not compatible with the @SetEnvironmentVariable
+ // annotation.
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "")
+ void shouldReturnDefaultDeserializationTypeWhenHandlerIsEmpty() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = " ")
+ void shouldReturnDefaultDeserializationTypeWhenHandlerIsWhitespaceOnly() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "InvalidHandlerFormat")
+ void shouldReturnDefaultDeserializationTypeWhenHandlerFormatIsInvalid() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "com.example.NonExistentClass::handleRequest")
+ void shouldReturnDefaultDeserializationTypeWhenClassNotFound() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "java.lang.String::toString")
+ void shouldReturnDefaultDeserializationTypeWhenClassIsNotRequestHandler() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.internal.DeserializationUtilsTest$TestHandler::nonExistentMethod")
+ void shouldReturnDefaultDeserializationTypeWhenMethodNotFound() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.JsonHandler::handleRequest")
+ void shouldReturnJsonDeserializationTypeFromAnnotation() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.KAFKA_JSON);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.AvroHandler::handleRequest")
+ void shouldReturnAvroDeserializationTypeFromAnnotation() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.KAFKA_AVRO);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.ProtobufHandler::handleRequest")
+ void shouldReturnProtobufDeserializationTypeFromAnnotation() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.KAFKA_PROTOBUF);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.JsonHandler")
+ void shouldReturnJsonDeserializationTypeFromAnnotationWithAbbreviatedHandler() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.KAFKA_JSON);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.AvroHandler")
+ void shouldReturnAvroDeserializationTypeFromAnnotationWithAbbreviatedHandler() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.KAFKA_AVRO);
+ }
+
+ @Test
+ @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.ProtobufHandler")
+ void shouldReturnProtobufDeserializationTypeFromAnnotationWithAbbreviatedHandler() {
+ // When
+ DeserializationType type = DeserializationUtils.determineDeserializationType();
+
+ // Then
+ assertThat(type).isEqualTo(DeserializationType.KAFKA_PROTOBUF);
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java
new file mode 100644
index 000000000..512058bca
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java
@@ -0,0 +1,473 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.util.Base64;
+import java.util.List;
+import java.util.stream.Stream;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.TopicPartition;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import software.amazon.lambda.powertools.kafka.testutils.TestProductPojo;
+import software.amazon.lambda.powertools.kafka.testutils.TestUtils;
+
+class AbstractKafkaDeserializerTest {
+
+ private TestDeserializer deserializer;
+ private static final ObjectMapper objectMapper = new ObjectMapper();
+
+ @BeforeEach
+ void setUp() {
+ deserializer = new TestDeserializer();
+ }
+
+ // CustomPojoSerializer has fromJson(String input, ...) and fromJson(InputStream input, ...). We want to test both.
+ static Stream<InputType> inputTypes() {
+ return Stream.of(InputType.INPUT_STREAM, InputType.STRING);
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldThrowExceptionWhenTypeIsNotConsumerRecords(InputType inputType) {
+ // Given
+ String json = "{}";
+
+ // When/Then
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(json.getBytes());
+ assertThatThrownBy(() -> deserializer.fromJson(inputStream, String.class))
+ .isInstanceOf(IllegalArgumentException.class)
+ .hasMessageContaining("Type must be ConsumerRecords");
+ } else {
+ assertThatThrownBy(() -> deserializer.fromJson(json, String.class))
+ .isInstanceOf(IllegalArgumentException.class)
+ .hasMessageContaining("Type must be ConsumerRecords");
+ }
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldThrowExceptionWhenJsonIsInvalid(InputType inputType) {
+ // Given
+ String invalidJson = "{invalid json";
+ Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // When/Then
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(invalidJson.getBytes());
+ assertThatThrownBy(() -> deserializer.fromJson(inputStream, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to deserialize Lambda handler input to ConsumerRecords");
+ } else {
+ assertThatThrownBy(() -> deserializer.fromJson(invalidJson, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to deserialize Lambda handler input to ConsumerRecords");
+ }
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldThrowExceptionWhenKeyDeserializationFails(InputType inputType) {
+ // Given
+ // Create a Kafka event with invalid Base64 for the key
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"invalid-base64!\",\n" +
+ " \"value\": \"eyJrZXkiOiJ2YWx1ZSJ9\",\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // When/Then
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ assertThatThrownBy(() -> deserializer.fromJson(inputStream, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to deserialize Kafka record key");
+ } else {
+ assertThatThrownBy(() -> deserializer.fromJson(kafkaJson, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to deserialize Kafka record key");
+ }
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldThrowExceptionWhenValueDeserializationFails(InputType inputType) {
+ // Given
+ // Create a Kafka event with invalid Base64 for the value
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": \"invalid-base64!\",\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // When/Then
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ assertThatThrownBy(() -> deserializer.fromJson(inputStream, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to deserialize Kafka record value");
+ } else {
+ assertThatThrownBy(() -> deserializer.fromJson(kafkaJson, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to deserialize Kafka record value");
+ }
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldHandleNullKeyAndValue(InputType inputType) {
+ // Given
+ // Create a Kafka event with null key and value
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": null,\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // When
+ ConsumerRecords<String, TestProductPojo> records;
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ records = deserializer.fromJson(inputStream, type);
+ } else {
+ records = deserializer.fromJson(kafkaJson, type);
+ }
+
+ // Then
+ assertThat(records).isNotNull();
+ TopicPartition tp = new TopicPartition("test-topic-1", 0);
+ List<ConsumerRecord<String, TestProductPojo>> topicRecords = records.records(tp);
+ assertThat(topicRecords).hasSize(1);
+
+ ConsumerRecord<String, TestProductPojo> consumerRecord = topicRecords.get(0);
+ assertThat(consumerRecord.key()).isNull();
+ assertThat(consumerRecord.value()).isNull();
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldHandleHeadersCorrectly(InputType inputType) {
+ // Given
+ // Create a Kafka event with headers
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": null,\n" +
+ " \"headers\": [\n" +
+ " {\n" +
+ " \"headerKey1\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101, 49],\n" +
+ " \"headerKey2\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101, 50]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // When
+ ConsumerRecords<String, TestProductPojo> records;
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ records = deserializer.fromJson(inputStream, type);
+ } else {
+ records = deserializer.fromJson(kafkaJson, type);
+ }
+
+ // Then
+ assertThat(records).isNotNull();
+ TopicPartition tp = new TopicPartition("test-topic-1", 0);
+ List<ConsumerRecord<String, TestProductPojo>> topicRecords = records.records(tp);
+ assertThat(topicRecords).hasSize(1);
+
+ ConsumerRecord<String, TestProductPojo> consumerRecord = topicRecords.get(0);
+ assertThat(consumerRecord.headers()).isNotNull();
+ assertThat(consumerRecord.headers().toArray()).hasSize(2);
+ assertThat(new String(consumerRecord.headers().lastHeader("headerKey1").value())).isEqualTo("headerValue1");
+ assertThat(new String(consumerRecord.headers().lastHeader("headerKey2").value())).isEqualTo("headerValue2");
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldHandleEmptyRecords(InputType inputType) {
+ // Given
+ // Create a Kafka event with no records
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {}\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // When
+ ConsumerRecords<String, TestProductPojo> records;
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ records = deserializer.fromJson(inputStream, type);
+ } else {
+ records = deserializer.fromJson(kafkaJson, type);
+ }
+
+ // Then
+ assertThat(records).isNotNull();
+ assertThat(records.count()).isZero();
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldHandleNullRecords(InputType inputType) {
+ // Given
+ // Create a Kafka event with null records
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\"\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // When
+ ConsumerRecords<String, TestProductPojo> records;
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ records = deserializer.fromJson(inputStream, type);
+ } else {
+ records = deserializer.fromJson(kafkaJson, type);
+ }
+
+ // Then
+ assertThat(records).isNotNull();
+ assertThat(records.count()).isZero();
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldThrowExceptionWhenEventSourceIsNull(InputType inputType) {
+ // Given
+ // Create a JSON without eventSource property
+ String kafkaJson = "{\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": null,\n" +
+ " \"value\": null,\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class);
+
+ // When/Then
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ assertThatThrownBy(() -> deserializer.fromJson(inputStream, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Input is not a valid Kafka event");
+ } else {
+ assertThatThrownBy(() -> deserializer.fromJson(kafkaJson, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Input is not a valid Kafka event");
+ }
+ }
+
+ static Stream<Arguments> primitiveTypesProvider() {
+ return Stream.of(
+ // For each primitive type, test with both INPUT_STREAM and STRING
+ Arguments.of("String-InputStream", String.class, "test-string", "test-string", InputType.INPUT_STREAM),
+ Arguments.of("String-String", String.class, "test-string", "test-string", InputType.STRING),
+ Arguments.of("Integer-InputStream", Integer.class, "123", 123, InputType.INPUT_STREAM),
+ Arguments.of("Integer-String", Integer.class, "123", 123, InputType.STRING),
+ Arguments.of("Long-InputStream", Long.class, "123456789", 123456789L, InputType.INPUT_STREAM),
+ Arguments.of("Long-String", Long.class, "123456789", 123456789L, InputType.STRING),
+ Arguments.of("Double-InputStream", Double.class, "123.456", 123.456, InputType.INPUT_STREAM),
+ Arguments.of("Double-String", Double.class, "123.456", 123.456, InputType.STRING),
+ Arguments.of("Float-InputStream", Float.class, "123.45", 123.45f, InputType.INPUT_STREAM),
+ Arguments.of("Float-String", Float.class, "123.45", 123.45f, InputType.STRING),
+ Arguments.of("Boolean-InputStream", Boolean.class, "true", true, InputType.INPUT_STREAM),
+ Arguments.of("Boolean-String", Boolean.class, "true", true, InputType.STRING),
+ Arguments.of("Byte-InputStream", Byte.class, "127", (byte) 127, InputType.INPUT_STREAM),
+ Arguments.of("Byte-String", Byte.class, "127", (byte) 127, InputType.STRING),
+ Arguments.of("Short-InputStream", Short.class, "32767", (short) 32767, InputType.INPUT_STREAM),
+ Arguments.of("Short-String", Short.class, "32767", (short) 32767, InputType.STRING),
+ Arguments.of("Character-InputStream", Character.class, "A", 'A', InputType.INPUT_STREAM),
+ Arguments.of("Character-String", Character.class, "A", 'A', InputType.STRING));
+ }
+
+ @ParameterizedTest(name = "Should handle {0}")
+ @MethodSource("primitiveTypesProvider")
+ <T> void shouldHandlePrimitiveTypes(String testName, Class<T> keyType, String keyValue, T expectedKey,
+ InputType inputType) throws IOException {
+ // Given
+ // Create a TestProductPojo and serialize it to JSON
+ TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, null);
+ String productJson = objectMapper.writeValueAsString(product);
+ String base64Value = Base64.getEncoder().encodeToString(productJson.getBytes());
+ String base64Key = Base64.getEncoder().encodeToString(keyValue.getBytes());
+
+ // Create a Kafka event with primitive type for key
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"" + base64Key + "\",\n" +
+ " \"value\": \"" + base64Value + "\",\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(keyType, TestProductPojo.class);
+
+ // When
+ ConsumerRecords<T, TestProductPojo> records;
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ records = deserializer.fromJson(inputStream, type);
+ } else {
+ records = deserializer.fromJson(kafkaJson, type);
+ }
+
+ // Then
+ assertThat(records).isNotNull();
+ TopicPartition tp = new TopicPartition("test-topic-1", 0);
+ List<ConsumerRecord<T, TestProductPojo>> topicRecords = records.records(tp);
+ assertThat(topicRecords).hasSize(1);
+
+ ConsumerRecord<T, TestProductPojo> consumerRecord = topicRecords.get(0);
+ assertThat(consumerRecord.key()).isEqualTo(expectedKey);
+ assertThat(consumerRecord.value()).isNotNull();
+ assertThat(consumerRecord.value().getId()).isEqualTo(123);
+ }
+
+ @ParameterizedTest
+ @MethodSource("inputTypes")
+ void shouldThrowExceptionWhenConvertingEmptyStringToChar(InputType inputType) {
+ // Given
+ String base64EmptyString = Base64.getEncoder().encodeToString("".getBytes());
+ String kafkaJson = "{\n" +
+ " \"eventSource\": \"aws:kafka\",\n" +
+ " \"records\": {\n" +
+ " \"test-topic-1\": [\n" +
+ " {\n" +
+ " \"topic\": \"test-topic-1\",\n" +
+ " \"partition\": 0,\n" +
+ " \"offset\": 15,\n" +
+ " \"timestamp\": 1545084650987,\n" +
+ " \"timestampType\": \"CREATE_TIME\",\n" +
+ " \"key\": \"" + base64EmptyString + "\",\n" +
+ " \"value\": null,\n" +
+ " \"headers\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ "}";
+ Type type = TestUtils.createConsumerRecordsType(Character.class, TestProductPojo.class);
+
+ // When/Then
+ if (inputType == InputType.INPUT_STREAM) {
+ ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes());
+ assertThatThrownBy(() -> deserializer.fromJson(inputStream, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to deserialize Kafka record key")
+ .hasRootCauseInstanceOf(IllegalArgumentException.class)
+ .hasRootCauseMessage("Cannot convert empty string to char");
+ } else {
+ assertThatThrownBy(() -> deserializer.fromJson(kafkaJson, type))
+ .isInstanceOf(RuntimeException.class)
+ .hasMessageContaining("Failed to deserialize Kafka record key")
+ .hasRootCauseInstanceOf(IllegalArgumentException.class)
+ .hasRootCauseMessage("Cannot convert empty string to char");
+ }
+ }
+
+ // Test implementation of AbstractKafkaDeserializer
+ private static class TestDeserializer extends AbstractKafkaDeserializer {
+ @Override
+ protected <T> T deserializeObject(byte[] data, Class<T> type) throws IOException {
+ return objectMapper.readValue(data, type);
+ }
+ }
+
+ enum InputType {
+ INPUT_STREAM, STRING
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java
new file mode 100644
index 000000000..a0b59b136
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static software.amazon.lambda.powertools.kafka.testutils.TestUtils.serializeAvro;
+
+import java.io.IOException;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct;
+
+class KafkaAvroDeserializerTest {
+
+ private KafkaAvroDeserializer deserializer;
+
+ @BeforeEach
+ void setUp() {
+ deserializer = new KafkaAvroDeserializer();
+ }
+
+ @Test
+ void shouldThrowExceptionWhenTypeIsNotAvroSpecificRecord() {
+ // Given
+ byte[] data = new byte[] { 1, 2, 3 };
+
+ // When/Then
+ assertThatThrownBy(() -> deserializer.deserializeObject(data, String.class))
+ .isInstanceOf(IOException.class)
+ .hasMessageContaining("Unsupported type for Avro deserialization");
+ }
+
+ @Test
+ void shouldDeserializeValidAvroData() throws IOException {
+ // Given
+ TestProduct product = new TestProduct(123, "Test Product", 99.99);
+ byte[] avroData = serializeAvro(product);
+
+ // When
+ TestProduct result = deserializer.deserializeObject(avroData, TestProduct.class);
+
+ // Then
+ assertThat(result).isNotNull();
+ assertThat(result.getId()).isEqualTo(123);
+ assertThat(result.getName()).isEqualTo("Test Product");
+ assertThat(result.getPrice()).isEqualTo(99.99);
+ }
+
+ @Test
+ void shouldThrowExceptionWhenDeserializingInvalidAvroData() {
+ // Given
+ byte[] invalidAvroData = new byte[] { 1, 2, 3, 4, 5 };
+
+ // When/Then
+ assertThatThrownBy(() -> deserializer.deserializeObject(invalidAvroData, TestProduct.class))
+ .isInstanceOf(IOException.class)
+ .hasMessageContaining("Failed to deserialize Avro data");
+ }
+
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java
new file mode 100644
index 000000000..0cfb2498b
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import software.amazon.lambda.powertools.kafka.testutils.TestProductPojo;
+
+class KafkaJsonDeserializerTest {
+
+ private KafkaJsonDeserializer deserializer;
+ private static final ObjectMapper objectMapper = new ObjectMapper();
+
+ @BeforeEach
+ void setUp() {
+ deserializer = new KafkaJsonDeserializer();
+ }
+
+ @Test
+ void shouldThrowExceptionWhenTypeIsNotSupportedForJson() {
+ // Given
+ byte[] data = new byte[] { 1, 2, 3 };
+
+ // When/Then
+ assertThatThrownBy(() -> deserializer.deserializeObject(data, Object.class))
+ .isInstanceOf(JsonParseException.class);
+ }
+
+ @Test
+ void shouldDeserializeValidJsonData() throws IOException {
+ // Given
+ TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2"));
+ byte[] jsonData = objectMapper.writeValueAsBytes(product);
+
+ // When
+ TestProductPojo result = deserializer.deserializeObject(jsonData, TestProductPojo.class);
+
+ // Then
+ assertThat(result).isNotNull();
+ assertThat(result.getId()).isEqualTo(123);
+ assertThat(result.getName()).isEqualTo("Test Product");
+ assertThat(result.getPrice()).isEqualTo(99.99);
+ assertThat(result.getTags()).containsExactly("tag1", "tag2");
+ }
+
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java
new file mode 100644
index 000000000..2d506de4b
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.serializers;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import java.io.IOException;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct;
+
+class KafkaProtobufDeserializerTest {
+
+ private KafkaProtobufDeserializer deserializer;
+
+ @BeforeEach
+ void setUp() {
+ deserializer = new KafkaProtobufDeserializer();
+ }
+
+ @Test
+ void shouldThrowExceptionWhenTypeIsNotProtobufMessage() {
+ // Given
+ byte[] data = new byte[] { 1, 2, 3 };
+
+ // When/Then
+ assertThatThrownBy(() -> deserializer.deserializeObject(data, String.class))
+ .isInstanceOf(IOException.class)
+ .hasMessageContaining("Unsupported type for Protobuf deserialization");
+ }
+
+ @Test
+ void shouldDeserializeValidProtobufData() throws IOException {
+ // Given
+ TestProduct product = TestProduct.newBuilder()
+ .setId(123)
+ .setName("Test Product")
+ .setPrice(99.99)
+ .build();
+ byte[] protobufData = product.toByteArray();
+
+ // When
+ TestProduct result = deserializer.deserializeObject(protobufData, TestProduct.class);
+
+ // Then
+ assertThat(result).isNotNull();
+ assertThat(result.getId()).isEqualTo(123);
+ assertThat(result.getName()).isEqualTo("Test Product");
+ assertThat(result.getPrice()).isEqualTo(99.99);
+ }
+
+ @Test
+ void shouldThrowExceptionWhenDeserializingInvalidProtobufData() {
+ // Given
+ byte[] invalidProtobufData = new byte[] { 1, 2, 3, 4, 5 };
+
+ // When/Then
+ assertThatThrownBy(() -> deserializer.deserializeObject(invalidProtobufData, TestProduct.class))
+ .isInstanceOf(IOException.class)
+ .hasMessageContaining("Failed to deserialize Protobuf data");
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/AvroHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/AvroHandler.java
new file mode 100644
index 000000000..d0fc9c1ba
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/AvroHandler.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.testutils;
+
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+import software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct;
+
+public class AvroHandler implements RequestHandler<ConsumerRecords<String, TestProduct>, String> {
+ @Override
+ @Deserialization(type = DeserializationType.KAFKA_AVRO)
+ public String handleRequest(ConsumerRecords<String, TestProduct> input, Context context) {
+ return "OK";
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/DefaultHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/DefaultHandler.java
new file mode 100644
index 000000000..31e93d872
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/DefaultHandler.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.testutils;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+
+// This is a non-Kafka specific handler. Just a handler using default deserialization into a Pojo. Used for testing
+// fallback to default Lambda serialization.
+public class DefaultHandler implements RequestHandler<TestProductPojo, String> {
+ @Override
+ @Deserialization(type = DeserializationType.LAMBDA_DEFAULT)
+ public String handleRequest(TestProductPojo input, Context context) {
+ return "OK";
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/InputStreamHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/InputStreamHandler.java
new file mode 100644
index 000000000..63e225ab8
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/InputStreamHandler.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.testutils;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+public class InputStreamHandler implements RequestHandler<InputStream, String> {
+ @Override
+ public String handleRequest(InputStream input, Context context) {
+ try {
+ return new String(input.readAllBytes());
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to read input stream", e);
+ }
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/JsonHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/JsonHandler.java
new file mode 100644
index 000000000..b6422f73c
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/JsonHandler.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.testutils;
+
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+
+public class JsonHandler implements RequestHandler<ConsumerRecords<String, TestProductPojo>, String> {
+ @Override
+ @Deserialization(type = DeserializationType.KAFKA_JSON)
+ public String handleRequest(ConsumerRecords<String, TestProductPojo> input, Context context) {
+ return "OK";
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/ProtobufHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/ProtobufHandler.java
new file mode 100644
index 000000000..a4ce61765
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/ProtobufHandler.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.testutils;
+
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+import software.amazon.lambda.powertools.kafka.Deserialization;
+import software.amazon.lambda.powertools.kafka.DeserializationType;
+import software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct;
+
+public class ProtobufHandler implements RequestHandler<ConsumerRecords<String, TestProduct>, String> {
+ @Override
+ @Deserialization(type = DeserializationType.KAFKA_PROTOBUF)
+ public String handleRequest(ConsumerRecords<String, TestProduct> input, Context context) {
+ return "OK";
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/StringHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/StringHandler.java
new file mode 100644
index 000000000..3ac5649f1
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/StringHandler.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.testutils;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+
+public class StringHandler implements RequestHandler<String, String> {
+ @Override
+ public String handleRequest(String input, Context context) {
+ return input;
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestProductPojo.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestProductPojo.java
new file mode 100644
index 000000000..8cd261aef
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestProductPojo.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.testutils;
+
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Simple POJO for testing JSON deserialization
+ */
+public class TestProductPojo {
+ private int id;
+ private String name;
+ private double price;
+ private List<String> tags;
+
+ // Default constructor required for Jackson
+ public TestProductPojo() {
+ }
+
+ public TestProductPojo(int id, String name, double price, List<String> tags) {
+ this.id = id;
+ this.name = name;
+ this.price = price;
+ this.tags = tags;
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public double getPrice() {
+ return price;
+ }
+
+ public void setPrice(double price) {
+ this.price = price;
+ }
+
+ public List<String> getTags() {
+ return tags;
+ }
+
+ public void setTags(List<String> tags) {
+ this.tags = tags;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+ TestProductPojo that = (TestProductPojo) o;
+ return id == that.id &&
+ Double.compare(that.price, price) == 0 &&
+ Objects.equals(name, that.name) &&
+ Objects.equals(tags, that.tags);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, name, price, tags);
+ }
+}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java
new file mode 100644
index 000000000..33623a9b2
--- /dev/null
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2023 Amazon.com, Inc. or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package software.amazon.lambda.powertools.kafka.testutils;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.specific.SpecificRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+/**
+ * Utility class for common test functions
+ */
+public class TestUtils {
+
+ /**
+ * Helper method to create a ParameterizedType for ConsumerRecords
+ *
+ * @param keyClass The class for the key type
+ * @param valueClass The class for the value type
+ * @return A Type representing ConsumerRecords
+ */
+ public static Type createConsumerRecordsType(final Class<?> keyClass, final Class<?> valueClass) {
+ return new ParameterizedType() {
+ @Override
+ public Type[] getActualTypeArguments() {
+ return new Type[] { keyClass, valueClass };
+ }
+
+ @Override
+ public Type getRawType() {
+ return ConsumerRecords.class;
+ }
+
+ @Override
+ public Type getOwnerType() {
+ return null;
+ }
+ };
+ }
+
+ /**
+ * Helper method to serialize an Avro object
+ *
+ * @param <T> The type of the Avro record
+ * @param consumerRecord The Avro record to serialize
+ * @return The serialized bytes
+ * @throws IOException If serialization fails
+ */
+ public static <T extends SpecificRecord> byte[] serializeAvro(T consumerRecord) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
+ @SuppressWarnings("unchecked")
+ DatumWriter<T> writer = new SpecificDatumWriter<>((Class<T>) consumerRecord.getClass());
+ writer.write(consumerRecord, encoder);
+ encoder.flush();
+ return baos.toByteArray();
+ }
+}
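The deserializer tests earlier in this patch combine these two helpers to build their fixtures. A minimal standalone sketch of that usage; the class name TestUtilsUsageSketch is illustrative, while TestProduct is the generated Avro test fixture used above:

    import java.io.IOException;
    import java.lang.reflect.Type;
    import java.util.Base64;

    import software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct;
    import software.amazon.lambda.powertools.kafka.testutils.TestUtils;

    public class TestUtilsUsageSketch {
        public static void main(String[] args) throws IOException {
            // Type token for ConsumerRecords<String, TestProduct>, as expected by the deserializers' fromJson(..)
            Type recordsType = TestUtils.createConsumerRecordsType(String.class, TestProduct.class);

            // Avro-serialize a record and base64-encode it, the form in which values appear in the Kafka event JSON
            byte[] avroBytes = TestUtils.serializeAvro(new TestProduct(1, "Sample", 9.99));
            String base64Value = Base64.getEncoder().encodeToString(avroBytes);

            System.out.println(recordsType + " -> value: " + base64Value);
        }
    }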
diff --git a/powertools-kafka/src/test/proto/TestProduct.proto b/powertools-kafka/src/test/proto/TestProduct.proto
new file mode 100644
index 000000000..53c654494
--- /dev/null
+++ b/powertools-kafka/src/test/proto/TestProduct.proto
@@ -0,0 +1,13 @@
+syntax = "proto3";
+
+package software.amazon.lambda.powertools.kafka.serializers.test.protobuf;
+
+option java_package = "software.amazon.lambda.powertools.kafka.serializers.test.protobuf";
+option java_outer_classname = "TestProductOuterClass";
+option java_multiple_files = true;
+
+message TestProduct {
+ int32 id = 1;
+ string name = 2;
+ double price = 3;
+}
\ No newline at end of file
diff --git a/powertools-kafka/src/test/resources/simplelogger.properties b/powertools-kafka/src/test/resources/simplelogger.properties
new file mode 100644
index 000000000..167581f74
--- /dev/null
+++ b/powertools-kafka/src/test/resources/simplelogger.properties
@@ -0,0 +1,13 @@
+# SLF4J Simple Logger configuration for tests
+org.slf4j.simpleLogger.defaultLogLevel=debug
+org.slf4j.simpleLogger.showDateTime=true
+org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS
+org.slf4j.simpleLogger.showThreadName=true
+org.slf4j.simpleLogger.showLogName=true
+org.slf4j.simpleLogger.showShortLogName=false
+
+# Redirect logs to a file instead of console to avoid bloated console output during tests
+org.slf4j.simpleLogger.logFile=target/test.log
+
+# Set specific logger levels
+org.slf4j.simpleLogger.log.software.amazon.lambda.powertools=debug
diff --git a/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaEcsEncoderTest.java b/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaEcsEncoderTest.java
index 5dcca2fb2..7e8977508 100644
--- a/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaEcsEncoderTest.java
+++ b/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaEcsEncoderTest.java
@@ -150,7 +150,7 @@ void shouldLogException() {
result = new String(encoded, StandardCharsets.UTF_8);
// THEN (stack is logged with root cause first)
- assertThat(result).contains("\"message\":\"Error\",\"error.message\":\"Unexpected value\",\"error.type\":\"java.lang.IllegalStateException\",\"error.stack_trace\":\"java.lang.IllegalStateException: Unexpected value\n");
+ assertThat(result).contains("\"message\":\"Error\",\"error.message\":\"Unexpected value\",\"error.type\":\"java.lang.IllegalStateException\",\"error.stack_trace\":\"java.lang.IllegalStateException: Unexpected value\\n");
}
private void setMDC() {
diff --git a/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaJsonEncoderTest.java b/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaJsonEncoderTest.java
index 4a7067540..81e830045 100644
--- a/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaJsonEncoderTest.java
+++ b/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaJsonEncoderTest.java
@@ -419,7 +419,7 @@ void shouldLogException() {
// THEN (stack is logged with root cause first)
assertThat(result).contains("\"message\":\"Unexpected value\"")
.contains("\"name\":\"java.lang.IllegalStateException\"")
- .contains("\"stack\":\"java.lang.IllegalStateException: Unexpected value\n");
+ .contains("\"stack\":\"java.lang.IllegalStateException: Unexpected value\\n");
}
private void setupContext() {
diff --git a/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/JsonSerializer.java b/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/JsonSerializer.java
index 82bc76a38..c69789519 100644
--- a/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/JsonSerializer.java
+++ b/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/JsonSerializer.java
@@ -84,8 +84,15 @@ public void writeString(String text) {
if (text == null) {
writeNull();
} else {
- // Escape double quotes to avoid breaking JSON format
- builder.append("\"").append(text.replace("\"", "\\\"")).append("\"");
+ // Escape special characters to avoid breaking JSON format
+ String escaped = text.replace("\\", "\\\\")
+ .replace("\"", "\\\"")
+ .replace("\n", "\\n")
+ .replace("\r", "\\r")
+ .replace("\t", "\\t")
+ .replace("\b", "\\b")
+ .replace("\f", "\\f");
+ builder.append("\"").append(escaped).append("\"");
}
}
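The order of the replace calls above matters: backslashes are escaped first, otherwise the backslashes introduced by the later replacements would themselves be doubled. A minimal sketch of the same replace chain applied to a value containing a quote and a newline; the class name JsonEscapeSketch is illustrative:

    public class JsonEscapeSketch {
        public static void main(String[] args) {
            String text = "say \"hi\"\nsecond line";
            // Backslashes first, then the characters whose escape sequences introduce new backslashes
            String escaped = text.replace("\\", "\\\\")
                    .replace("\"", "\\\"")
                    .replace("\n", "\\n")
                    .replace("\r", "\\r")
                    .replace("\t", "\\t")
                    .replace("\b", "\\b")
                    .replace("\f", "\\f");
            // Prints: "say \"hi\"\nsecond line" -- a single, valid JSON string token
            System.out.println("\"" + escaped + "\"");
        }
    }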
From f563d2349d5a569d5a6996d9404023fdceed64a3 Mon Sep 17 00:00:00 2001
From: Philipp Page
Date: Thu, 19 Jun 2025 15:09:17 +0200
Subject: [PATCH 7/9] fix(kafka): Add support for confluent message indices.
(#1902)
* fix(kafka): Add support for confluent message indices.
* Make Generator classes non-instantiable (they are static utility classes).
* Make generator classes final.
---
.../events/kafka-protobuf-event.json | 4 +-
.../powertools-examples-kafka/tools/README.md | 9 +-
.../demo/kafka/tools/GenerateAvroSamples.java | 34 ++---
.../demo/kafka/tools/GenerateJsonSamples.java | 6 +-
.../kafka/tools/GenerateProtobufSamples.java | 117 ++++++++++++------
.../KafkaProtobufDeserializer.java | 58 ++++++++-
.../KafkaProtobufDeserializerTest.java | 77 ++++++++++++
7 files changed, 246 insertions(+), 59 deletions(-)
diff --git a/examples/powertools-examples-kafka/events/kafka-protobuf-event.json b/examples/powertools-examples-kafka/events/kafka-protobuf-event.json
index b3e0139e3..e0547ad88 100644
--- a/examples/powertools-examples-kafka/events/kafka-protobuf-event.json
+++ b/examples/powertools-examples-kafka/events/kafka-protobuf-event.json
@@ -25,7 +25,7 @@
"timestamp": 1545084650988,
"timestampType": "CREATE_TIME",
"key": "NDI=",
- "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA",
+ "value": "AAjpBxIGTGFwdG9wGVK4HoXrP49A",
"headers": [
{
"headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
@@ -39,7 +39,7 @@
"timestamp": 1545084650989,
"timestampType": "CREATE_TIME",
"key": null,
- "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA",
+ "value": "AgEACOkHEgZMYXB0b3AZUrgehes/j0A=",
"headers": [
{
"headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
diff --git a/examples/powertools-examples-kafka/tools/README.md b/examples/powertools-examples-kafka/tools/README.md
index 53d07b0c4..02e8dde9b 100644
--- a/examples/powertools-examples-kafka/tools/README.md
+++ b/examples/powertools-examples-kafka/tools/README.md
@@ -45,7 +45,7 @@ The tool will output base64-encoded values for Avro products that can be used in
mvn exec:java -Dexec.mainClass="org.demo.kafka.tools.GenerateProtobufSamples"
```
-The tool will output base64-encoded values for Protobuf products that can be used in `../events/kafka-protobuf-event.json`.
+The tool will output base64-encoded values for Protobuf products that can be used in `../events/kafka-protobuf-event.json`. This generator creates samples with and without Confluent message-indexes to test different serialization scenarios.
## Output
@@ -55,6 +55,13 @@ Each generator produces:
2. An integer key (42) and one entry with a null key to test edge cases
3. A complete sample event structure that can be used directly for testing
+The Protobuf generators additionally create samples with different Confluent message-index formats:
+- Standard protobuf (no message indexes)
+- Simple message index (single 0 byte)
+- Complex message index (length-prefixed array)
+
+For more information about Confluent Schema Registry serialization formats and wire format specifications, see the [Confluent documentation](https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/index.html#wire-format).
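+
+Illustratively, the value bytes for the three scenarios differ only in their varint prefix, followed by the plain Protobuf message bytes:
+
+```
+Standard protobuf:     [protobuf bytes]
+Simple message index:  0x00 [protobuf bytes]
+Complex message index: 0x02 0x01 0x00 [protobuf bytes]   (array length 2, then indexes 1 and 0)
+```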
+
## Example
After generating the samples, you can copy the output into the respective event files:
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java
index 4bd6ebd13..e6f4d38fd 100644
--- a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java
@@ -14,62 +14,68 @@
* Utility class to generate base64-encoded Avro serialized products
* for use in test events.
*/
-public class GenerateAvroSamples {
+public final class GenerateAvroSamples {
+
+ private GenerateAvroSamples() {
+ // Utility class
+ }
public static void main(String[] args) throws IOException {
// Create three different products
AvroProduct product1 = new AvroProduct(1001, "Laptop", 999.99);
AvroProduct product2 = new AvroProduct(1002, "Smartphone", 599.99);
AvroProduct product3 = new AvroProduct(1003, "Headphones", 149.99);
-
+
// Serialize and encode each product
String encodedProduct1 = serializeAndEncode(product1);
String encodedProduct2 = serializeAndEncode(product2);
String encodedProduct3 = serializeAndEncode(product3);
-
+
// Serialize and encode an integer key
String encodedKey = serializeAndEncodeInteger(42);
-
+
// Print the results
System.out.println("Base64 encoded Avro products for use in kafka-avro-event.json:");
System.out.println("\nProduct 1 (with key):");
System.out.println("key: \"" + encodedKey + "\",");
System.out.println("value: \"" + encodedProduct1 + "\",");
-
+
System.out.println("\nProduct 2 (with key):");
System.out.println("key: \"" + encodedKey + "\",");
System.out.println("value: \"" + encodedProduct2 + "\",");
-
+
System.out.println("\nProduct 3 (without key):");
System.out.println("key: null,");
System.out.println("value: \"" + encodedProduct3 + "\",");
-
+
// Print a sample event structure
System.out.println("\nSample event structure:");
printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3);
}
-
+
private static String serializeAndEncode(AvroProduct product) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
DatumWriter<AvroProduct> writer = new SpecificDatumWriter<>(AvroProduct.class);
-
+
writer.write(product, encoder);
encoder.flush();
-
+
return Base64.getEncoder().encodeToString(baos.toByteArray());
}
-
+
private static String serializeAndEncodeInteger(Integer value) throws IOException {
// For simple types like integers, we'll just convert to string and encode
return Base64.getEncoder().encodeToString(value.toString().getBytes());
}
-
+
private static void printSampleEvent(String key, String product1, String product2, String product3) {
System.out.println("{\n" +
" \"eventSource\": \"aws:kafka\",\n" +
- " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n" +
- " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n" +
+ " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n"
+ +
+ " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n"
+ +
" \"records\": {\n" +
" \"mytopic-0\": [\n" +
" {\n" +
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java
index a4fd6565a..d0ef7cb55 100644
--- a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java
@@ -11,7 +11,11 @@
* Utility class to generate base64-encoded JSON serialized products
* for use in test events.
*/
-public class GenerateJsonSamples {
+public final class GenerateJsonSamples {
+
+ private GenerateJsonSamples() {
+ // Utility class
+ }
public static void main(String[] args) throws IOException {
// Create three different products
diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java
index ae078a28a..eecd3e1cc 100644
--- a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java
+++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java
@@ -1,73 +1,110 @@
package org.demo.kafka.tools;
+import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Base64;
import org.demo.kafka.protobuf.ProtobufProduct;
+import com.google.protobuf.CodedOutputStream;
+
/**
* Utility class to generate base64-encoded Protobuf serialized products
* for use in test events.
*/
-public class GenerateProtobufSamples {
+public final class GenerateProtobufSamples {
+
+ private GenerateProtobufSamples() {
+ // Utility class
+ }
public static void main(String[] args) throws IOException {
- // Create three different products
- ProtobufProduct product1 = ProtobufProduct.newBuilder()
+ // Create a single product that will be used for all three scenarios
+ ProtobufProduct product = ProtobufProduct.newBuilder()
.setId(1001)
.setName("Laptop")
.setPrice(999.99)
.build();
- ProtobufProduct product2 = ProtobufProduct.newBuilder()
- .setId(1002)
- .setName("Smartphone")
- .setPrice(599.99)
- .build();
-
- ProtobufProduct product3 = ProtobufProduct.newBuilder()
- .setId(1003)
- .setName("Headphones")
- .setPrice(149.99)
- .build();
-
- // Serialize and encode each product
- String encodedProduct1 = serializeAndEncode(product1);
- String encodedProduct2 = serializeAndEncode(product2);
- String encodedProduct3 = serializeAndEncode(product3);
+ // Create three different serializations of the same product
+ String standardProduct = serializeAndEncode(product);
+ String productWithSimpleIndex = serializeWithSimpleMessageIndex(product);
+ String productWithComplexIndex = serializeWithComplexMessageIndex(product);
- // Serialize and encode an integer key
+ // Serialize and encode an integer key (same for all records)
String encodedKey = serializeAndEncodeInteger(42);
// Print the results
- System.out.println("Base64 encoded Protobuf products for use in kafka-protobuf-event.json:");
- System.out.println("\nProduct 1 (with key):");
- System.out.println("key: \"" + encodedKey + "\",");
- System.out.println("value: \"" + encodedProduct1 + "\",");
-
- System.out.println("\nProduct 2 (with key):");
- System.out.println("key: \"" + encodedKey + "\",");
- System.out.println("value: \"" + encodedProduct2 + "\",");
-
- System.out.println("\nProduct 3 (without key):");
- System.out.println("key: null,");
- System.out.println("value: \"" + encodedProduct3 + "\",");
-
- // Print a sample event structure
- System.out.println("\nSample event structure:");
- printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3);
+ System.out.println("Base64 encoded Protobuf products with different message index scenarios:");
+ System.out.println("\n1. Standard Protobuf (no message index):");
+ System.out.println("value: \"" + standardProduct + "\"");
+
+ System.out.println("\n2. Simple Message Index (single 0):");
+ System.out.println("value: \"" + productWithSimpleIndex + "\"");
+
+ System.out.println("\n3. Complex Message Index (array [1,0]):");
+ System.out.println("value: \"" + productWithComplexIndex + "\"");
+
+ // Print the merged event structure
+ System.out.println("\n" + "=".repeat(80));
+ System.out.println("MERGED EVENT WITH ALL THREE SCENARIOS");
+ System.out.println("=".repeat(80));
+ printSampleEvent(encodedKey, standardProduct, productWithSimpleIndex, productWithComplexIndex);
}
private static String serializeAndEncode(ProtobufProduct product) {
return Base64.getEncoder().encodeToString(product.toByteArray());
}
+ /**
+ * Serializes a protobuf product with a simple Confluent message index (single 0).
+ * Format: [0][protobuf_data]
+ *
+ * @see {@link https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/index.html#wire-format}
+ */
+ private static String serializeWithSimpleMessageIndex(ProtobufProduct product) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CodedOutputStream codedOutput = CodedOutputStream.newInstance(baos);
+
+ // Write simple message index (single 0)
+ codedOutput.writeUInt32NoTag(0);
+
+ // Write the protobuf data
+ product.writeTo(codedOutput);
+
+ codedOutput.flush();
+ return Base64.getEncoder().encodeToString(baos.toByteArray());
+ }
+
+ /**
+ * Serializes a protobuf product with a complex Confluent message index (array [1,0]).
+ * Format: [2][1][0][protobuf_data] where 2 is the array length
+ *
+ * @see {@link https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/index.html#wire-format}
+ */
+ private static String serializeWithComplexMessageIndex(ProtobufProduct product) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CodedOutputStream codedOutput = CodedOutputStream.newInstance(baos);
+
+ // Write complex message index array [1,0]
+ codedOutput.writeUInt32NoTag(2); // Array length
+ codedOutput.writeUInt32NoTag(1); // First index value
+ codedOutput.writeUInt32NoTag(0); // Second index value
+
+ // Write the protobuf data
+ product.writeTo(codedOutput);
+
+ codedOutput.flush();
+ return Base64.getEncoder().encodeToString(baos.toByteArray());
+ }
+
private static String serializeAndEncodeInteger(Integer value) {
// For simple types like integers, we'll just convert to string and encode
return Base64.getEncoder().encodeToString(value.toString().getBytes());
}
- private static void printSampleEvent(String key, String product1, String product2, String product3) {
+ private static void printSampleEvent(String key, String standardProduct, String simpleIndexProduct,
+ String complexIndexProduct) {
System.out.println("{\n" +
" \"eventSource\": \"aws:kafka\",\n" +
" \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n"
@@ -83,7 +120,7 @@ private static void printSampleEvent(String key, String product1, String product
" \"timestamp\": 1545084650987,\n" +
" \"timestampType\": \"CREATE_TIME\",\n" +
" \"key\": \"" + key + "\",\n" +
- " \"value\": \"" + product1 + "\",\n" +
+ " \"value\": \"" + standardProduct + "\",\n" +
" \"headers\": [\n" +
" {\n" +
" \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
@@ -97,7 +134,7 @@ private static void printSampleEvent(String key, String product1, String product
" \"timestamp\": 1545084650988,\n" +
" \"timestampType\": \"CREATE_TIME\",\n" +
" \"key\": \"" + key + "\",\n" +
- " \"value\": \"" + product2 + "\",\n" +
+ " \"value\": \"" + simpleIndexProduct + "\",\n" +
" \"headers\": [\n" +
" {\n" +
" \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
@@ -111,7 +148,7 @@ private static void printSampleEvent(String key, String product1, String product
" \"timestamp\": 1545084650989,\n" +
" \"timestampType\": \"CREATE_TIME\",\n" +
" \"key\": null,\n" +
- " \"value\": \"" + product3 + "\",\n" +
+ " \"value\": \"" + complexIndexProduct + "\",\n" +
" \"headers\": [\n" +
" {\n" +
" \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" +
diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java
index 025f203c4..c15be552f 100644
--- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java
+++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java
@@ -13,14 +13,27 @@
package software.amazon.lambda.powertools.kafka.serializers;
import java.io.IOException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
/**
* Deserializer for Kafka records using Protocol Buffers format.
+ * Supports both standard protobuf serialization and Confluent Schema Registry serialization using message indices.
+ *
+ * For Confluent-serialized data, assumes the magic byte and schema ID have already been stripped
+ * by the Kafka ESM, leaving only the message index (if present) and protobuf data.
+ *
+ * @see {@link https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/index.html#wire-format}
*/
public class KafkaProtobufDeserializer extends AbstractKafkaDeserializer {
+ private static final Logger LOGGER = LoggerFactory.getLogger(KafkaProtobufDeserializer.class);
+
@Override
@SuppressWarnings("unchecked")
protected T deserializeObject(byte[] data, Class type) throws IOException {
@@ -29,7 +42,9 @@ protected T deserializeObject(byte[] data, Class type) throws IOException
try {
// Get the parser from the generated Protobuf class
Parser<Message> parser = (Parser<Message>) type.getMethod("parser").invoke(null);
- Message message = parser.parseFrom(data);
+
+ // Try to deserialize the data, handling potential Confluent message indices
+ Message message = deserializeWithMessageIndexHandling(data, parser);
return type.cast(message);
} catch (Exception e) {
throw new IOException("Failed to deserialize Protobuf data.", e);
@@ -40,4 +55,45 @@ protected T deserializeObject(byte[] data, Class type) throws IOException
+ "Consider using an alternative Deserializer.");
}
}
+
+ private Message deserializeWithMessageIndexHandling(byte[] data, Parser<Message> parser) throws IOException {
+ try {
+ LOGGER.debug("Attempting to deserialize as standard protobuf data");
+ return parser.parseFrom(data);
+ } catch (Exception e) {
+ LOGGER.debug("Standard protobuf parsing failed, attempting Confluent message-index handling");
+ return deserializeWithMessageIndex(data, parser);
+ }
+ }
+
+ private Message deserializeWithMessageIndex(byte[] data, Parser<Message> parser) throws IOException {
+ CodedInputStream codedInputStream = CodedInputStream.newInstance(data);
+
+ try {
+ // https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/index.html#wire-format
+ // Read the first varint - this could be:
+ // 1. A single 0 (simple case - first message type)
+ // 2. The length of the message index array (complex case)
+ int firstValue = codedInputStream.readUInt32();
+
+ if (firstValue == 0) {
+ // Simple case: Single 0 byte means first message type
+ LOGGER.debug("Found simple message-index case (single 0), parsing remaining data as protobuf");
+ return parser.parseFrom(codedInputStream);
+ } else {
+ // Complex case: firstValue is the length of the message index array
+ LOGGER.debug("Found complex message-index case with array length: {}, skipping {} message index values",
+ firstValue, firstValue);
+ for (int i = 0; i < firstValue; i++) {
+ codedInputStream.readUInt32(); // Skip each message index value
+ }
+ // Now the remaining data should be the actual protobuf message
+ LOGGER.debug("Finished skipping message indexes, parsing remaining data as protobuf");
+ return parser.parseFrom(codedInputStream);
+ }
+
+ } catch (Exception e) {
+ throw new IOException("Failed to parse protobuf data with or without message index", e);
+ }
+ }
}
diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java
index 2d506de4b..3315e1172 100644
--- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java
+++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java
@@ -15,11 +15,14 @@
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import com.google.protobuf.CodedOutputStream;
+
import software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct;
class KafkaProtobufDeserializerTest {
@@ -72,4 +75,78 @@ void shouldThrowExceptionWhenDeserializingInvalidProtobufData() {
.isInstanceOf(IOException.class)
.hasMessageContaining("Failed to deserialize Protobuf data");
}
+
+ @Test
+ void shouldDeserializeProtobufDataWithSimpleMessageIndex() throws IOException {
+ // Given
+ TestProduct product = TestProduct.newBuilder()
+ .setId(456)
+ .setName("Simple Index Product")
+ .setPrice(199.99)
+ .build();
+
+ // Create protobuf data with simple message index (single 0)
+ byte[] protobufDataWithSimpleIndex = createProtobufDataWithSimpleMessageIndex(product);
+
+ // When
+ TestProduct result = deserializer.deserializeObject(protobufDataWithSimpleIndex, TestProduct.class);
+
+ // Then
+ assertThat(result).isNotNull();
+ assertThat(result.getId()).isEqualTo(456);
+ assertThat(result.getName()).isEqualTo("Simple Index Product");
+ assertThat(result.getPrice()).isEqualTo(199.99);
+ }
+
+ @Test
+ void shouldDeserializeProtobufDataWithComplexMessageIndex() throws IOException {
+ // Given
+ TestProduct product = TestProduct.newBuilder()
+ .setId(789)
+ .setName("Complex Index Product")
+ .setPrice(299.99)
+ .build();
+
+ // Create protobuf data with complex message index (array [1,0])
+ byte[] protobufDataWithComplexIndex = createProtobufDataWithComplexMessageIndex(product);
+
+ // When
+ TestProduct result = deserializer.deserializeObject(protobufDataWithComplexIndex, TestProduct.class);
+
+ // Then
+ assertThat(result).isNotNull();
+ assertThat(result.getId()).isEqualTo(789);
+ assertThat(result.getName()).isEqualTo("Complex Index Product");
+ assertThat(result.getPrice()).isEqualTo(299.99);
+ }
+
+ private byte[] createProtobufDataWithSimpleMessageIndex(TestProduct product) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CodedOutputStream codedOutput = CodedOutputStream.newInstance(baos);
+
+ // Write simple message index (single 0)
+ codedOutput.writeUInt32NoTag(0);
+
+ // Write the protobuf data
+ product.writeTo(codedOutput);
+
+ codedOutput.flush();
+ return baos.toByteArray();
+ }
+
+ private byte[] createProtobufDataWithComplexMessageIndex(TestProduct product) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CodedOutputStream codedOutput = CodedOutputStream.newInstance(baos);
+
+ // Write complex message index array [1,0]
+ codedOutput.writeUInt32NoTag(2); // Array length
+ codedOutput.writeUInt32NoTag(1); // First index value
+ codedOutput.writeUInt32NoTag(0); // Second index value
+
+ // Write the protobuf data
+ product.writeTo(codedOutput);
+
+ codedOutput.flush();
+ return baos.toByteArray();
+ }
}
From cf01e91eb0b398b89b8fb442edfe27e1532a5810 Mon Sep 17 00:00:00 2001
From: Philipp Page
Date: Thu, 19 Jun 2025 15:31:48 +0200
Subject: [PATCH 8/9] fix(ci): Add maven project description to Kafka utility.
(#1903)
---
powertools-kafka/pom.xml | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml
index f5b80012c..f96c1eb82 100644
--- a/powertools-kafka/pom.xml
+++ b/powertools-kafka/pom.xml
@@ -28,7 +28,10 @@
jarPowertools for AWS Lambda (Java) - Kafka Consumer
-
+
+ The Kafka utility transparently handles message deserialization, provides an intuitive developer experience,
+ and integrates seamlessly with the rest of the Powertools for AWS Lambda ecosystem.
+ 4.0.0
From 33748d46eacbf41528452efe5f9f98defdfd1ddd Mon Sep 17 00:00:00 2001
From: "Powertools for AWS Lambda (Java) Bot"
<151832416+aws-powertools-bot@users.noreply.github.com>
Date: Thu, 19 Jun 2025 13:41:20 +0000
Subject: [PATCH 9/9] chore(ci): bump version to 2.1.0
---
README.md | 6 +++---
examples/pom.xml | 2 +-
examples/powertools-examples-batch/pom.xml | 2 +-
examples/powertools-examples-cloudformation/pom.xml | 2 +-
examples/powertools-examples-core-utilities/cdk/app/pom.xml | 2 +-
.../powertools-examples-core-utilities/cdk/infra/pom.xml | 2 +-
.../powertools-examples-core-utilities/gradle/build.gradle | 6 +++---
.../kotlin/build.gradle.kts | 6 +++---
.../powertools-examples-core-utilities/sam-graalvm/pom.xml | 2 +-
examples/powertools-examples-core-utilities/sam/pom.xml | 2 +-
.../powertools-examples-core-utilities/serverless/pom.xml | 2 +-
.../powertools-examples-core-utilities/terraform/pom.xml | 2 +-
examples/powertools-examples-idempotency/pom.xml | 2 +-
examples/powertools-examples-kafka/pom.xml | 2 +-
examples/powertools-examples-parameters/sam-graalvm/pom.xml | 2 +-
examples/powertools-examples-parameters/sam/pom.xml | 2 +-
examples/powertools-examples-serialization/pom.xml | 2 +-
examples/powertools-examples-validation/pom.xml | 2 +-
mkdocs.yml | 2 +-
pom.xml | 2 +-
powertools-batch/pom.xml | 2 +-
powertools-cloudformation/pom.xml | 2 +-
powertools-common/pom.xml | 2 +-
powertools-e2e-tests/pom.xml | 2 +-
powertools-idempotency/pom.xml | 2 +-
powertools-idempotency/powertools-idempotency-core/pom.xml | 2 +-
.../powertools-idempotency-dynamodb/pom.xml | 2 +-
powertools-kafka/pom.xml | 2 +-
powertools-large-messages/pom.xml | 2 +-
powertools-logging/pom.xml | 2 +-
powertools-logging/powertools-logging-log4j/pom.xml | 2 +-
powertools-logging/powertools-logging-logback/pom.xml | 2 +-
powertools-metrics/pom.xml | 2 +-
powertools-parameters/pom.xml | 2 +-
.../powertools-parameters-appconfig/pom.xml | 2 +-
.../powertools-parameters-dynamodb/pom.xml | 2 +-
powertools-parameters/powertools-parameters-secrets/pom.xml | 2 +-
powertools-parameters/powertools-parameters-ssm/pom.xml | 2 +-
powertools-parameters/powertools-parameters-tests/pom.xml | 2 +-
powertools-serialization/pom.xml | 2 +-
powertools-tracing/pom.xml | 2 +-
powertools-validation/pom.xml | 2 +-
42 files changed, 48 insertions(+), 48 deletions(-)
diff --git a/README.md b/README.md
index 88955bca7..165458a65 100644
--- a/README.md
+++ b/README.md
@@ -22,17 +22,17 @@ Powertools for AWS Lambda (Java) is available in Maven Central. You can use your
software.amazon.lambdapowertools-tracing
- 2.0.0
+ 2.1.0software.amazon.lambdapowertools-logging
- 2.0.0
+ 2.1.0software.amazon.lambdapowertools-metrics
- 2.0.0
+ 2.1.0
...
diff --git a/examples/pom.xml b/examples/pom.xml
index ea1e8d542..496c933c3 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -20,7 +20,7 @@
software.amazon.lambdapowertools-examples
- 2.0.0
+ 2.1.0pomPowertools for AWS Lambda (Java) - Examples
diff --git a/examples/powertools-examples-batch/pom.xml b/examples/powertools-examples-batch/pom.xml
index ef3c0e4e0..1cf1e6f65 100644
--- a/examples/powertools-examples-batch/pom.xml
+++ b/examples/powertools-examples-batch/pom.xml
@@ -5,7 +5,7 @@
4.0.0software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-batchjarPowertools for AWS Lambda (Java) - Examples - Batch
diff --git a/examples/powertools-examples-cloudformation/pom.xml b/examples/powertools-examples-cloudformation/pom.xml
index 43c81b9f8..74a0090fe 100644
--- a/examples/powertools-examples-cloudformation/pom.xml
+++ b/examples/powertools-examples-cloudformation/pom.xml
@@ -3,7 +3,7 @@
4.0.0software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-cloudformationjar
diff --git a/examples/powertools-examples-core-utilities/cdk/app/pom.xml b/examples/powertools-examples-core-utilities/cdk/app/pom.xml
index fa5a1927a..0c4dec217 100644
--- a/examples/powertools-examples-core-utilities/cdk/app/pom.xml
+++ b/examples/powertools-examples-core-utilities/cdk/app/pom.xml
@@ -6,7 +6,7 @@
software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-core-utilities-cdkjar
diff --git a/examples/powertools-examples-core-utilities/cdk/infra/pom.xml b/examples/powertools-examples-core-utilities/cdk/infra/pom.xml
index b869a5672..2ddd389a9 100644
--- a/examples/powertools-examples-core-utilities/cdk/infra/pom.xml
+++ b/examples/powertools-examples-core-utilities/cdk/infra/pom.xml
@@ -4,7 +4,7 @@
4.0.0software.amazon.lambda.examplescdk
- 2.0.0
+ 2.1.0UTF-82.162.1
diff --git a/examples/powertools-examples-core-utilities/gradle/build.gradle b/examples/powertools-examples-core-utilities/gradle/build.gradle
index 4cf988a6f..37d9f4554 100644
--- a/examples/powertools-examples-core-utilities/gradle/build.gradle
+++ b/examples/powertools-examples-core-utilities/gradle/build.gradle
@@ -29,8 +29,8 @@ dependencies {
implementation 'com.amazonaws:aws-lambda-java-events:3.11.0'
implementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.2'
implementation 'org.aspectj:aspectjrt:1.9.20.1'
- aspect 'software.amazon.lambda:powertools-tracing:2.0.0'
- aspect 'software.amazon.lambda:powertools-logging-log4j:2.0.0'
- aspect 'software.amazon.lambda:powertools-metrics:2.0.0'
+ aspect 'software.amazon.lambda:powertools-tracing:2.1.0'
+ aspect 'software.amazon.lambda:powertools-logging-log4j:2.1.0'
+ aspect 'software.amazon.lambda:powertools-metrics:2.1.0'
}
diff --git a/examples/powertools-examples-core-utilities/kotlin/build.gradle.kts b/examples/powertools-examples-core-utilities/kotlin/build.gradle.kts
index 7029dc458..de820300d 100644
--- a/examples/powertools-examples-core-utilities/kotlin/build.gradle.kts
+++ b/examples/powertools-examples-core-utilities/kotlin/build.gradle.kts
@@ -15,9 +15,9 @@ dependencies {
implementation("com.amazonaws:aws-lambda-java-events:3.11.3")
implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.15.2")
implementation("org.aspectj:aspectjrt:1.9.20.1")
- aspect("software.amazon.lambda:powertools-tracing:2.0.0")
- aspect("software.amazon.lambda:powertools-logging-log4j:2.0.0")
- aspect("software.amazon.lambda:powertools-metrics:2.0.0")
+ aspect("software.amazon.lambda:powertools-tracing:2.1.0")
+ aspect("software.amazon.lambda:powertools-logging-log4j:2.1.0")
+ aspect("software.amazon.lambda:powertools-metrics:2.1.0")
implementation("org.jetbrains.kotlin:kotlin-stdlib:1.9.24")
}
diff --git a/examples/powertools-examples-core-utilities/sam-graalvm/pom.xml b/examples/powertools-examples-core-utilities/sam-graalvm/pom.xml
index b2353b86e..ead4625f5 100644
--- a/examples/powertools-examples-core-utilities/sam-graalvm/pom.xml
+++ b/examples/powertools-examples-core-utilities/sam-graalvm/pom.xml
@@ -4,7 +4,7 @@
Powertools for AWS Lambda (Java) - Examples - Core Utilities (logging, tracing, metrics) with SAM GraalVMsoftware.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-core-utilities-sam-graalvmjar
diff --git a/examples/powertools-examples-core-utilities/sam/pom.xml b/examples/powertools-examples-core-utilities/sam/pom.xml
index 813fc267f..44f171698 100644
--- a/examples/powertools-examples-core-utilities/sam/pom.xml
+++ b/examples/powertools-examples-core-utilities/sam/pom.xml
@@ -4,7 +4,7 @@
Powertools for AWS Lambda (Java) - Examples - Core Utilities (logging, tracing, metrics) with SAMsoftware.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-core-utilities-samjar
diff --git a/examples/powertools-examples-core-utilities/serverless/pom.xml b/examples/powertools-examples-core-utilities/serverless/pom.xml
index 1aea70820..42d70ba76 100644
--- a/examples/powertools-examples-core-utilities/serverless/pom.xml
+++ b/examples/powertools-examples-core-utilities/serverless/pom.xml
@@ -4,7 +4,7 @@
Powertools for AWS Lambda (Java) - Examples - Core Utilities (logging, tracing, metrics) with Serverlesssoftware.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-core-utilities-serverlessjar
diff --git a/examples/powertools-examples-core-utilities/terraform/pom.xml b/examples/powertools-examples-core-utilities/terraform/pom.xml
index dd4c385c0..a9ab410e3 100644
--- a/examples/powertools-examples-core-utilities/terraform/pom.xml
+++ b/examples/powertools-examples-core-utilities/terraform/pom.xml
@@ -4,7 +4,7 @@
Powertools for AWS Lambda (Java) - Examples - Core Utilities (logging, tracing, metrics) with Terraformsoftware.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-core-utilities-terraformjar
diff --git a/examples/powertools-examples-idempotency/pom.xml b/examples/powertools-examples-idempotency/pom.xml
index c7ceabc57..06a50c16b 100644
--- a/examples/powertools-examples-idempotency/pom.xml
+++ b/examples/powertools-examples-idempotency/pom.xml
@@ -17,7 +17,7 @@
4.0.0software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-idempotencyjarPowertools for AWS Lambda (Java) - Examples - Idempotency
diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml
index a745ac75d..185790b64 100644
--- a/examples/powertools-examples-kafka/pom.xml
+++ b/examples/powertools-examples-kafka/pom.xml
@@ -2,7 +2,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-kafkajarPowertools for AWS Lambda (Java) - Examples - Kafka
diff --git a/examples/powertools-examples-parameters/sam-graalvm/pom.xml b/examples/powertools-examples-parameters/sam-graalvm/pom.xml
index f2ce0f21f..320ed42cd 100644
--- a/examples/powertools-examples-parameters/sam-graalvm/pom.xml
+++ b/examples/powertools-examples-parameters/sam-graalvm/pom.xml
@@ -2,7 +2,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-parameters-sam-graalvmjarPowertools for AWS Lambda (Java) - Examples - Parameters GraalVM
diff --git a/examples/powertools-examples-parameters/sam/pom.xml b/examples/powertools-examples-parameters/sam/pom.xml
index a797bbeed..ea8029b8c 100644
--- a/examples/powertools-examples-parameters/sam/pom.xml
+++ b/examples/powertools-examples-parameters/sam/pom.xml
@@ -2,7 +2,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-parameters-samjarPowertools for AWS Lambda (Java) - Examples - Parameters
diff --git a/examples/powertools-examples-serialization/pom.xml b/examples/powertools-examples-serialization/pom.xml
index b96d02b6b..f9408012c 100644
--- a/examples/powertools-examples-serialization/pom.xml
+++ b/examples/powertools-examples-serialization/pom.xml
@@ -2,7 +2,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-serializationjarPowertools for AWS Lambda (Java) - Examples - Serialization
diff --git a/examples/powertools-examples-validation/pom.xml b/examples/powertools-examples-validation/pom.xml
index 9b6df9783..3b9d9baeb 100644
--- a/examples/powertools-examples-validation/pom.xml
+++ b/examples/powertools-examples-validation/pom.xml
@@ -16,7 +16,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0software.amazon.lambda.examples
- 2.0.0
+ 2.1.0powertools-examples-validationjarPowertools for AWS Lambda (Java) - Examples - Validation
diff --git a/mkdocs.yml b/mkdocs.yml
index 07be3c175..1880e543d 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -121,7 +121,7 @@ extra_javascript:
extra:
powertools:
- version: 2.0.0
+ version: 2.1.0
version:
provider: mike
default: latest
diff --git a/pom.xml b/pom.xml
index f27ffc497..0542295bb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -20,7 +20,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0pomPowertools for AWS Lambda (Java) - Parent
diff --git a/powertools-batch/pom.xml b/powertools-batch/pom.xml
index 59e5b8c93..4f6b78f5e 100644
--- a/powertools-batch/pom.xml
+++ b/powertools-batch/pom.xml
@@ -6,7 +6,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0A suite of utilities that makes batch message processing using AWS Lambda easier.
diff --git a/powertools-cloudformation/pom.xml b/powertools-cloudformation/pom.xml
index 271704dea..657d42e42 100644
--- a/powertools-cloudformation/pom.xml
+++ b/powertools-cloudformation/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0Powertools for AWS Lambda (Java) - Cloudformation
diff --git a/powertools-common/pom.xml b/powertools-common/pom.xml
index dd6ef8e61..8e0229bb4 100644
--- a/powertools-common/pom.xml
+++ b/powertools-common/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0Powertools for AWS Lambda (Java) - Common Internal Utilities
diff --git a/powertools-e2e-tests/pom.xml b/powertools-e2e-tests/pom.xml
index 4f1c059e9..c49e6824a 100644
--- a/powertools-e2e-tests/pom.xml
+++ b/powertools-e2e-tests/pom.xml
@@ -20,7 +20,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0powertools-e2e-tests
diff --git a/powertools-idempotency/pom.xml b/powertools-idempotency/pom.xml
index 862cf3160..169953642 100644
--- a/powertools-idempotency/pom.xml
+++ b/powertools-idempotency/pom.xml
@@ -21,7 +21,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0powertools-idempotency
diff --git a/powertools-idempotency/powertools-idempotency-core/pom.xml b/powertools-idempotency/powertools-idempotency-core/pom.xml
index 58d184fc5..846dea2d8 100644
--- a/powertools-idempotency/powertools-idempotency-core/pom.xml
+++ b/powertools-idempotency/powertools-idempotency-core/pom.xml
@@ -21,7 +21,7 @@
software.amazon.lambdapowertools-idempotency
- 2.0.0
+ 2.1.0powertools-idempotency-core
diff --git a/powertools-idempotency/powertools-idempotency-dynamodb/pom.xml b/powertools-idempotency/powertools-idempotency-dynamodb/pom.xml
index b92d66dbc..099eb12ab 100644
--- a/powertools-idempotency/powertools-idempotency-dynamodb/pom.xml
+++ b/powertools-idempotency/powertools-idempotency-dynamodb/pom.xml
@@ -21,7 +21,7 @@
software.amazon.lambdapowertools-idempotency
- 2.0.0
+ 2.1.0powertools-idempotency-dynamodb
diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml
index f96c1eb82..deac5cc3e 100644
--- a/powertools-kafka/pom.xml
+++ b/powertools-kafka/pom.xml
@@ -21,7 +21,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0powertools-kafka
diff --git a/powertools-large-messages/pom.xml b/powertools-large-messages/pom.xml
index b23e3f41c..8eb0e143f 100644
--- a/powertools-large-messages/pom.xml
+++ b/powertools-large-messages/pom.xml
@@ -23,7 +23,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0powertools-large-messages
diff --git a/powertools-logging/pom.xml b/powertools-logging/pom.xml
index 75d5853e5..2ee71ae67 100644
--- a/powertools-logging/pom.xml
+++ b/powertools-logging/pom.xml
@@ -21,7 +21,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0Powertools for AWS Lambda (Java) - Logging
diff --git a/powertools-logging/powertools-logging-log4j/pom.xml b/powertools-logging/powertools-logging-log4j/pom.xml
index 75aa94a97..6fe11ea93 100644
--- a/powertools-logging/powertools-logging-log4j/pom.xml
+++ b/powertools-logging/powertools-logging-log4j/pom.xml
@@ -7,7 +7,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0../../pom.xml
diff --git a/powertools-logging/powertools-logging-logback/pom.xml b/powertools-logging/powertools-logging-logback/pom.xml
index 8b2a5cfd5..6fd411c47 100644
--- a/powertools-logging/powertools-logging-logback/pom.xml
+++ b/powertools-logging/powertools-logging-logback/pom.xml
@@ -6,7 +6,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0../../pom.xml
diff --git a/powertools-metrics/pom.xml b/powertools-metrics/pom.xml
index 460eb220f..19b185f4d 100644
--- a/powertools-metrics/pom.xml
+++ b/powertools-metrics/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0Powertools for AWS Lambda (Java) - Metrics
diff --git a/powertools-parameters/pom.xml b/powertools-parameters/pom.xml
index 96f6f50b3..d61e76322 100644
--- a/powertools-parameters/pom.xml
+++ b/powertools-parameters/pom.xml
@@ -21,7 +21,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0powertools-parameters
diff --git a/powertools-parameters/powertools-parameters-appconfig/pom.xml b/powertools-parameters/powertools-parameters-appconfig/pom.xml
index a3822d11b..52b15296b 100644
--- a/powertools-parameters/powertools-parameters-appconfig/pom.xml
+++ b/powertools-parameters/powertools-parameters-appconfig/pom.xml
@@ -7,7 +7,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0../../pom.xml
diff --git a/powertools-parameters/powertools-parameters-dynamodb/pom.xml b/powertools-parameters/powertools-parameters-dynamodb/pom.xml
index 9c7030d7c..0b82087c9 100644
--- a/powertools-parameters/powertools-parameters-dynamodb/pom.xml
+++ b/powertools-parameters/powertools-parameters-dynamodb/pom.xml
@@ -7,7 +7,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0../../pom.xml
diff --git a/powertools-parameters/powertools-parameters-secrets/pom.xml b/powertools-parameters/powertools-parameters-secrets/pom.xml
index 99a308825..c90cf1c69 100644
--- a/powertools-parameters/powertools-parameters-secrets/pom.xml
+++ b/powertools-parameters/powertools-parameters-secrets/pom.xml
@@ -7,7 +7,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0../../pom.xml
diff --git a/powertools-parameters/powertools-parameters-ssm/pom.xml b/powertools-parameters/powertools-parameters-ssm/pom.xml
index 46cf939ba..f03983fff 100644
--- a/powertools-parameters/powertools-parameters-ssm/pom.xml
+++ b/powertools-parameters/powertools-parameters-ssm/pom.xml
@@ -7,7 +7,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0../../pom.xml
diff --git a/powertools-parameters/powertools-parameters-tests/pom.xml b/powertools-parameters/powertools-parameters-tests/pom.xml
index 4d2b5d145..9f8b12911 100644
--- a/powertools-parameters/powertools-parameters-tests/pom.xml
+++ b/powertools-parameters/powertools-parameters-tests/pom.xml
@@ -6,7 +6,7 @@
software.amazon.lambdapowertools-parent
- 2.0.0
+ 2.1.0../../pom.xml
diff --git a/powertools-serialization/pom.xml b/powertools-serialization/pom.xml
index 986a3b1d9..d6e04b3b8 100644
--- a/powertools-serialization/pom.xml
+++ b/powertools-serialization/pom.xml
@@ -21,7 +21,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0powertools-serialization
diff --git a/powertools-tracing/pom.xml b/powertools-tracing/pom.xml
index 14d6d51b1..d76ddffcb 100644
--- a/powertools-tracing/pom.xml
+++ b/powertools-tracing/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0Powertools for AWS Lambda (Java) - Tracing
diff --git a/powertools-validation/pom.xml b/powertools-validation/pom.xml
index bfedf8d40..a7403d6b3 100644
--- a/powertools-validation/pom.xml
+++ b/powertools-validation/pom.xml
@@ -24,7 +24,7 @@
powertools-parentsoftware.amazon.lambda
- 2.0.0
+ 2.1.0Powertools for AWS Lambda (Java) - Validation