diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/pom.xml
index d27478d52c1..e8877b5f5d8 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/pom.xml
@@ -115,6 +115,13 @@
            <version>${testcontainer.version}</version>
            <scope>test</scope>
        </dependency>
+        <dependency>
+            <groupId>org.apache.seatunnel</groupId>
+            <artifactId>connector-common</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/src/test/java/org/apache/seatunnel/e2e/connector/kafka/KafkaIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/src/test/java/org/apache/seatunnel/e2e/connector/kafka/KafkaIT.java
index 7d717056b00..d070d2e178f 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/src/test/java/org/apache/seatunnel/e2e/connector/kafka/KafkaIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/src/test/java/org/apache/seatunnel/e2e/connector/kafka/KafkaIT.java
@@ -39,8 +39,15 @@
import org.apache.seatunnel.api.table.type.SeaTunnelRowType;
import org.apache.seatunnel.common.utils.JsonUtils;
import org.apache.seatunnel.connectors.seatunnel.kafka.config.KafkaBaseConstants;
+import org.apache.seatunnel.connectors.seatunnel.kafka.config.KafkaBaseOptions;
+import org.apache.seatunnel.connectors.seatunnel.kafka.config.KafkaSourceOptions;
import org.apache.seatunnel.connectors.seatunnel.kafka.config.MessageFormat;
+import org.apache.seatunnel.connectors.seatunnel.kafka.config.StartMode;
import org.apache.seatunnel.connectors.seatunnel.kafka.serialize.DefaultSeaTunnelRowSerializer;
+import org.apache.seatunnel.connectors.seatunnel.kafka.sink.KafkaSinkFactory;
+import org.apache.seatunnel.connectors.seatunnel.kafka.source.KafkaSourceFactory;
+import org.apache.seatunnel.connectors.seatunnel.sink.SinkFlowTestUtils;
+import org.apache.seatunnel.connectors.seatunnel.source.SourceFlowTestUtils;
import org.apache.seatunnel.e2e.common.TestResource;
import org.apache.seatunnel.e2e.common.TestSuiteBase;
import org.apache.seatunnel.e2e.common.container.EngineType;
@@ -1255,7 +1262,7 @@ public void testFakeSourceToKafkaProtobufFormat(TestContainer container)
"c_bool",
"c_string",
"c_bytes",
- "Address",
+ "address",
"attributes",
"phone_numbers"
},
@@ -2013,6 +2020,261 @@ private SeaTunnelRow buildSeaTunnelRow() {
return seaTunnelRow;
}
+ @TestTemplate
+ public void testProtobufCaseSensitiveFieldNames(TestContainer container) throws Exception {
+ String topicName = "test_protobuf_case_sensitive_fieldnames_" + System.nanoTime();
+ SeaTunnelRowType seaTunnelRowType = buildCaseSensitiveSeaTunnelRowType();
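+ // buildCaseSensitiveSeaTunnelRowType() should mirror the proto schema declared just
+ // below; a hedged sketch of it (and of createCaseSensitiveTestRows()) follows this
+ // test method.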
+
+ Map<String, String> schemaProperties = new HashMap<>();
+ schemaProperties.put("protobuf_message_name", "TestMessage");
+ schemaProperties.put(
+ "protobuf_schema",
+ "syntax = \"proto3\";\n"
+ + "package org.apache.seatunnel.format.protobuf;\n"
+ + "message TestMessage {\n"
+ + " int32 MyIntField = 1;\n"
+ + " string CamelCaseString = 2;\n"
+ + " string snake_case_field = 3;\n"
+ + " message NestedObject {\n"
+ + " string NestedField = 1;\n"
+ + " int32 AnotherField = 2;\n"
+ + " }\n"
+ + " NestedObject nestedObject = 4;\n"
+ + " map<string, string> MyMapField = 5;\n"
+ + "}");
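+ // The message deliberately mixes PascalCase, camelCase, and snake_case identifiers, so
+ // any case-folding in the protobuf (de)serialization path would break this test.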
+
+ TableSchema schema =
+ TableSchema.builder()
+ .columns(
+ Arrays.asList(
+ IntStream.range(0, seaTunnelRowType.getTotalFields())
+ .mapToObj(
+ i ->
+ PhysicalColumn.of(
+ seaTunnelRowType
+ .getFieldName(i),
+ seaTunnelRowType
+ .getFieldType(i),
+ 0,
+ true,
+ null,
+ null))
+ .toArray(PhysicalColumn[]::new)))
+ .build();
+
+ CatalogTable catalogTable =
+ CatalogTable.of(
+ TableIdentifier.of("", "", "", "test"),
+ schema,
+ schemaProperties,
+ Collections.emptyList(),
+ "Protobuf case-sensitive test");
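+ // Assumption: ProtobufDeserializationSchema resolves protobuf_message_name and
+ // protobuf_schema from these catalog-table options when decoding the sink output below.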
+
+ Map<String, Object> config = new HashMap<>();
+ config.put(KafkaBaseOptions.TOPIC.key(), topicName);
+ config.put(KafkaBaseOptions.BOOTSTRAP_SERVERS.key(), kafkaContainer.getBootstrapServers());
+ config.put(KafkaBaseOptions.FORMAT.key(), MessageFormat.PROTOBUF);
+ config.put("protobuf_message_name", "TestMessage");
+ config.put(
+ "protobuf_schema",
+ "syntax = \"proto3\";\n"
+ + "package org.apache.seatunnel.format.protobuf;\n"
+ + "message TestMessage {\n"
+ + " int32 MyIntField = 1;\n"
+ + " string CamelCaseString = 2;\n"
+ + " string snake_case_field = 3;\n"
+ + " message NestedObject {\n"
+ + " string NestedField = 1;\n"
+ + " int32 AnotherField = 2;\n"
+ + " }\n"
+ + " NestedObject nestedObject = 4;\n"
+ + " map<string, string> MyMapField = 5;\n"
+ + "}");
+
+ List<SeaTunnelRow> rows = createCaseSensitiveTestRows();
+
+ // Use SinkFlowTestUtils to write data to Kafka
+ SinkFlowTestUtils.runBatchWithCheckpointDisabled(
+ catalogTable, ReadonlyConfig.fromMap(config), new KafkaSinkFactory(), rows);
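+ // Assumption: runBatchWithCheckpointDisabled drives the rows through a sink created by
+ // KafkaSinkFactory in one bounded run, flushing every record before it returns.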
+
+ // Verify data from Kafka
+ ProtobufDeserializationSchema deserializationSchema =
+ new ProtobufDeserializationSchema(catalogTable);
+
+ List<SeaTunnelRow> kafkaSTRow =
+ getKafkaSTRow(
+ topicName,
+ value -> {
+ try {
+ return deserializationSchema.deserialize(value);
+ } catch (IOException e) {
+ throw new RuntimeException("Error deserializing Kafka message", e);
+ }
+ });
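+ // getKafkaSTRow (existing helper in this class) presumably consumes the topic from the
+ // beginning and maps each record value through the supplied deserializer.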
+
+ Assertions.assertEquals(2, kafkaSTRow.size());
+
+ kafkaSTRow.forEach(
+ row -> {
+ Assertions.assertAll(
+ "Verify case-sensitive field values",
+ () -> Assertions.assertNotNull(row.getField(0)), // MyIntField
+ () -> Assertions.assertNotNull(row.getField(1)), // CamelCaseString
+ () -> Assertions.assertNotNull(row.getField(2)), // snake_case_field
+ () -> {
+ SeaTunnelRow nestedRow = (SeaTunnelRow) row.getField(3);
+ if (nestedRow != null) {
+ Assertions.assertNotNull(nestedRow.getField(0)); // NestedField
+ Assertions.assertNotNull(nestedRow.getField(1)); // AnotherField
+ }
+ },
+ () -> {
+ @SuppressWarnings("unchecked")
+ Map<String, String> mapField =
+ (Map<String, String>) row.getField(4);
+ if (mapField != null) {
+ Assertions.assertNotNull(mapField);
+ }
+ });
+ });
+ }
+
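+ // Hedged sketch, not the verbatim helpers from this patch (their actual definitions sit
+ // further down in the diff, past the point where this excerpt is truncated). The shape
+ // below is inferred from the proto schema above and the assertEquals(2, ...) check; it
+ // assumes imports of BasicType, MapType, and SeaTunnelDataType from
+ // org.apache.seatunnel.api.table.type, plus java.util.ArrayList.
+ private SeaTunnelRowType buildCaseSensitiveSeaTunnelRowType() {
+     return new SeaTunnelRowType(
+             new String[] {
+                 "MyIntField", "CamelCaseString", "snake_case_field",
+                 "nestedObject", "MyMapField"
+             },
+             new SeaTunnelDataType[] {
+                 BasicType.INT_TYPE,
+                 BasicType.STRING_TYPE,
+                 BasicType.STRING_TYPE,
+                 new SeaTunnelRowType(
+                         new String[] {"NestedField", "AnotherField"},
+                         new SeaTunnelDataType[] {BasicType.STRING_TYPE, BasicType.INT_TYPE}),
+                 new MapType<>(BasicType.STRING_TYPE, BasicType.STRING_TYPE)
+             });
+ }
+
+ private List<SeaTunnelRow> createCaseSensitiveTestRows() {
+     List<SeaTunnelRow> rows = new ArrayList<>();
+     for (int i = 0; i < 2; i++) { // exactly two rows, matching the size assertion above
+         Map<String, String> map = new HashMap<>();
+         map.put("Key" + i, "Value" + i);
+         rows.add(
+                 new SeaTunnelRow(
+                         new Object[] {
+                             i,
+                             "CamelValue" + i,
+                             "snake_value_" + i,
+                             new SeaTunnelRow(new Object[] {"Nested" + i, i}),
+                             map
+                         }));
+     }
+     return rows;
+ }
+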
+ @TestTemplate
+ public void testProtobufCaseSensitiveToAssert(TestContainer container) throws Exception {
+ String topicName = "test_protobuf_case_sensitive_toassert_" + System.nanoTime();
+ SeaTunnelRowType seaTunnelRowType = buildCaseSensitiveSeaTunnelRowType();
+
+ // Write test data to Kafka first - load schema from config but use dynamic topic
+ String confFile = "/protobuf/kafka_protobuf_case_sensitive_to_assert.conf";
+ String path = getTestConfigFile(confFile);
+ Config config = ConfigFactory.parseFile(new File(path));
+ Config sourceConfig = config.getConfigList("source").get(0);
+ ReadonlyConfig readonlyConfig = ReadonlyConfig.fromConfig(sourceConfig);
+
+ DefaultSeaTunnelRowSerializer serializer =
+ getDefaultSeaTunnelRowSerializer(topicName, seaTunnelRowType, readonlyConfig);
+
+ List<SeaTunnelRow> testRows = createCaseSensitiveTestRows();
+
+ for (SeaTunnelRow row : testRows) {
+ ProducerRecord<byte[], byte[]> producerRecord = serializer.serializeRow(row);
+ try {
+ producer.send(producerRecord).get();
+ } catch (InterruptedException | ExecutionException e) {
+ throw new RuntimeException("Error sending Kafka message", e);
+ }
+ }
+ producer.flush();
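+ // send(...).get() blocks per record, so ordering is deterministic and broker errors
+ // surface at the failing record instead of at flush time.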
+
+ // Use SourceFlowTestUtils to read data from Kafka
+ Map<String, Object> sourceOptions = new HashMap<>();
+ sourceOptions.put(KafkaBaseOptions.TOPIC.key(), topicName);
+ sourceOptions.put(
+ KafkaBaseOptions.BOOTSTRAP_SERVERS.key(), kafkaContainer.getBootstrapServers());
+ sourceOptions.put(KafkaBaseOptions.FORMAT.key(), MessageFormat.PROTOBUF);
+ sourceOptions.put(
+ "protobuf_message_name",
+ readonlyConfig.get(KafkaBaseOptions.PROTOBUF_MESSAGE_NAME));
+ sourceOptions.put("protobuf_schema", readonlyConfig.get(KafkaBaseOptions.PROTOBUF_SCHEMA));
+ sourceOptions.put(KafkaSourceOptions.START_MODE.key(), StartMode.EARLIEST);
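+ // EARLIEST is required here: the records were produced before this source run starts,
+ // so a latest-style start mode would read nothing.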
+ sourceOptions.put(
+ KafkaSourceOptions.CONSUMER_GROUP.key(),
+ "test_protobuf_case_sensitive_toassert_" + topicName);
+
+ // Add schema configuration to match the Protobuf schema
+ Map<String, Object> schemaConfig = new HashMap<>();
+ Map<String, String> nestedObjectFields = new HashMap<>();
+ nestedObjectFields.put("NestedField", "string");
+ nestedObjectFields.put("AnotherField", "int");
+
+ List