- Gradle依赖
// Gradle configuration: protobuf code generation + Confluent Kafka
// protobuf/avro serializers + MapStruct (with the protobuf SPI so MapStruct
// can map to/from generated protobuf builders).
plugins {
id 'com.google.protobuf' version '0.8.18'
}
apply plugin: 'com.google.protobuf'
repositories {
mavenCentral()
// Confluent artifacts (kafka-protobuf-serializer, serdes) are not on Maven Central.
maven { url "https://packages.confluent.io/maven/" }
}
dependencies {
implementation "io.confluent:kafka-streams-avro-serde:7.0.1"
implementation "io.confluent:kafka-avro-serializer:7.0.1"
implementation 'org.apache.avro:avro:1.11.0'
// this is to generate avsc file
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-avro:2.12.1'
implementation "org.mapstruct:mapstruct:1.4.2.Final"
compileOnly "org.mapstruct:mapstruct-processor:1.4.2.Final"
annotationProcessor "org.mapstruct:mapstruct-processor:1.4.2.Final"
implementation 'com.google.protobuf:protobuf-java:3.19.3'
// SPI that teaches MapStruct how to use protobuf builders (putXxx/addXxx accessors).
compileOnly "no.entur.mapstruct.spi:protobuf-spi-impl:1.22"
annotationProcessor "no.entur.mapstruct.spi:protobuf-spi-impl:1.22"
implementation "io.confluent:kafka-protobuf-serializer:7.0.1"
}
sourceSets {
main {
java {
// NOTE(review): the protobuf-gradle-plugin normally expects `proto { srcDir ... }`
// directly under the source set (not nested inside `java`), and this path does not
// match generatedFilesBaseDir below — confirm the intended layout.
proto {
srcDir 'generated/main/java'
}
}
}
}
protobuf {
// Configure the protoc executable
protoc {
// Download from repositories
artifact = "com.google.protobuf:protoc:3.19.3"
}
// Where protoc writes generated Java sources.
generatedFilesBaseDir = "$projectDir/src/main/proto/generated"
}
- 定义model
/**
 * Domain model used as the application-facing counterpart of the generated
 * {@code ProtoMyTestObject} protobuf message; the two are converted by
 * {@code ProtoMyTestObjectMapper}. Lombok generates getters/setters,
 * builder, and both constructors.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class MyTestObject {
private String field1;
private long field2;
private int field3;
private boolean field4;
// Maps to `repeated string field5` in the proto schema.
private List<String> field5;
// Maps to `repeated ProtoMyAvroTestInnerList1 field6`.
private List<MyAvroTestInnerList1> field6;
// Maps to `map<string, ProtoMyAvroTestInnerMap1> field7` (converted manually in the mapper).
private Map<String, MyAvroTestInnerMap1> field7;
// Maps to google.protobuf.Timestamp field8 — the LocalDateTime<->Timestamp
// conversion is presumably provided by BaseMapper; confirm.
private LocalDateTime field8;
}
/**
 * Element type of {@code MyTestObject.field6}; counterpart of the generated
 * {@code ProtoMyAvroTestInnerList1} protobuf message.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class MyAvroTestInnerList1 {
private String innerListField1;
}
/**
 * Value type of {@code MyTestObject.field7}; counterpart of the generated
 * {@code ProtoMyAvroTestInnerMap1} protobuf message.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class MyAvroTestInnerMap1 {
private String innerMapField1;
}
- 定义proto文件: my_avro_test_inner_list_1.proto
// Schema for the list-element message; mirrors the Java class MyAvroTestInnerList1.
syntax = "proto3";
package com.example.springbootstudygradle.model.protobuf;
// Emit each message type as its own top-level .java file.
option java_multiple_files = true;
message ProtoMyAvroTestInnerList1 {
// Field numbers are the wire-format contract; never renumber existing fields.
string innerListField1 = 1;
}
my_avro_test_inner_map_1.proto
// Schema for the map-value message; mirrors the Java class MyAvroTestInnerMap1.
syntax = "proto3";
package com.example.springbootstudygradle.model.protobuf;
// Emit each message type as its own top-level .java file.
option java_multiple_files = true;
message ProtoMyAvroTestInnerMap1 {
// Field numbers are the wire-format contract; never renumber existing fields.
string innerMapField1 = 1;
}
my_test_object.proto
// Top-level message; mirrors the Java class MyTestObject. The imports below
// become schema-registry references when this schema is registered.
syntax = "proto3";
package com.example.springbootstudygradle.model.protobuf;
import "google/protobuf/timestamp.proto";
import "my_avro_test_inner_list_1.proto";
import "my_avro_test_inner_map_1.proto";
option java_multiple_files = true;
message ProtoMyTestObject {
string field1 = 1;
int64 field2 = 2;
int32 field3 = 3;
bool field4 = 4;
repeated string field5 = 5;
repeated ProtoMyAvroTestInnerList1 field6 = 6;
// Proto map fields generate a nested map-entry message on the wire.
map<string, ProtoMyAvroTestInnerMap1> field7 = 7;
// Corresponds to LocalDateTime field8 on the Java side.
google.protobuf.Timestamp field8 = 8;
}
- 使用gradle命令生成class文件
gradle assemble
gradle build
- 使用MapStruct将object转换成avro object
/**
 * MapStruct mapper converting between the domain type {@link MyTestObject} and the
 * generated protobuf type {@link ProtoMyTestObject}. Both unmapped-source and
 * unmapped-target are reported as errors so schema drift fails the build.
 */
@Mapper(
    collectionMappingStrategy = CollectionMappingStrategy.ADDER_PREFERRED,
    nullValueCheckStrategy = NullValueCheckStrategy.ALWAYS,
    unmappedSourcePolicy = ReportingPolicy.ERROR,
    unmappedTargetPolicy = ReportingPolicy.ERROR)
public abstract class ProtoMyTestObjectMapper extends BaseMapper {

  // Fixed: declared final — the shared singleton must not be reassignable.
  public static final ProtoMyTestObjectMapper INSTANCE =
      Mappers.getMapper(ProtoMyTestObjectMapper.class);

  /** Proto -> domain. Generated by MapStruct. */
  public abstract MyTestObject map(ProtoMyTestObject protoMyTestObject);

  // field7 (proto map<string, message>) is ignored here and filled in manually by
  // map(MyTestObject) below — presumably the generated code does not handle the
  // proto map builder for message values; confirm before removing the workaround.
  // `source` is still declared so the ERROR unmappedSourcePolicy stays satisfied.
  @Mapping(target = "field7", source = "field7", ignore = true)
  public abstract ProtoMyTestObject mapProtoMyTestObject(MyTestObject myTestObject);

  /**
   * Domain -> proto entry point: delegates to the generated mapping, then copies
   * field7 through the builder.
   */
  public ProtoMyTestObject map(MyTestObject myTestObject) {
    ProtoMyTestObject protoMyTestObject = mapProtoMyTestObject(myTestObject);
    ProtoMyTestObject.Builder protoMyTestObjectBuilder = protoMyTestObject.toBuilder();
    setField7(myTestObject, protoMyTestObjectBuilder);
    return protoMyTestObjectBuilder.build();
  }

  /** Copies field7 entry-by-entry, converting each value to its proto counterpart. */
  private void setField7(MyTestObject myTestObject, ProtoMyTestObject.Builder protoMyTestObjectBuilder) {
    Map<String, MyAvroTestInnerMap1> map = myTestObject.getField7();
    if (map == null || map.isEmpty()) {
      return;
    }
    for (Map.Entry<String, MyAvroTestInnerMap1> entry : map.entrySet()) {
      MyAvroTestInnerMap1 myAvroTestInnerMap1 = entry.getValue();
      protoMyTestObjectBuilder.putField7(entry.getKey(), mapInnerMap(myAvroTestInnerMap1));
    }
  }

  /** Proto map value -> domain map value. Generated by MapStruct. */
  public abstract MyAvroTestInnerMap1 mapInnerMap(ProtoMyAvroTestInnerMap1 protoMyAvroTestInnerMap1);

  /** Domain map value -> proto map value. Generated by MapStruct. */
  public abstract ProtoMyAvroTestInnerMap1 mapInnerMap(MyAvroTestInnerMap1 myAvroTestInnerMap1);
}
- Kafka Producer code
// Producer configuration.
Properties properties = new Properties();
// Fixed: bootstrap.servers takes host:port pairs — an "http://" scheme is not
// a valid broker address and makes address parsing/resolution fail.
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class);
// Schema Registry endpoint (this one IS a real HTTP URL).
properties.put(KafkaProtobufSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
// When true, well-known proto types (e.g. google/protobuf/timestamp.proto) are
// not registered as separate subjects in the schema registry.
properties.put(KafkaProtobufSerializerConfig.SKIP_KNOWN_TYPES_CONFIG, false);
// Fixed: KafkaProducer is Closeable — close it so buffered records are flushed
// and network resources are released.
try (KafkaProducer<String, ProtoMyTestObject> producer = new KafkaProducer<>(properties)) {
    ProducerRecord<String, ProtoMyTestObject> record = new ProducerRecord<>(
        "protobuf_test", ProtoMyTestObjectMapper.INSTANCE.map(build()));
    // Blocking send for demo purposes only; production code should use the async callback.
    RecordMetadata recordMetadata = producer.send(record).get();
    System.out.println(recordMetadata);
}
- 发送之后可以在registry上看到schema
http://localhost:8081/subjects/
[
"my_avro_test_inner_list_1.proto",
"my_avro_test_inner_map_1.proto",
"google/protobuf/timestamp.proto",
"protobuf_test_2-value"
]
http://localhost:8081/subjects/my_avro_test_inner_list_1.proto/versions/latest
{
"subject": "my_avro_test_inner_list_1.proto",
"version": 1,
"id": 3,
"schemaType": "PROTOBUF",
"schema": "syntax = \"proto3\";\npackage com.example.springbootstudygradle.model.protobuf;\n\noption java_multiple_files = true;\n\nmessage ProtoMyAvroTestInnerList1 {\n string innerListField1 = 1;\n}\n"
}
http://localhost:8081/subjects/my_avro_test_inner_map_1.proto/versions/latest
{
"subject": "my_avro_test_inner_map_1.proto",
"version": 1,
"id": 4,
"schemaType": "PROTOBUF",
"schema": "syntax = \"proto3\";\npackage com.example.springbootstudygradle.model.protobuf;\n\noption java_multiple_files = true;\n\nmessage ProtoMyAvroTestInnerMap1 {\n string innerMapField1 = 1;\n}\n"
}
http://localhost:8081/subjects/protobuf_test_2-value/versions/latest
{
"subject": "protobuf_test_2-value",
"version": 2,
"id": 5,
"schemaType": "PROTOBUF",
"references":
[
{
"name": "my_avro_test_inner_list_1.proto",
"subject": "my_avro_test_inner_list_1.proto",
"version": 1
},
{
"name": "my_avro_test_inner_map_1.proto",
"subject": "my_avro_test_inner_map_1.proto",
"version": 1
},
{
"name": "google/protobuf/timestamp.proto",
"subject": "google/protobuf/timestamp.proto",
"version": 1
}
],
"schema": "syntax = \"proto3\";\npackage com.example.springbootstudygradle.model.protobuf;\n\nimport \"my_avro_test_inner_list_1.proto\";\nimport \"my_avro_test_inner_map_1.proto\";\nimport public \"google/protobuf/timestamp.proto\";\n\noption java_multiple_files = true;\n\nmessage ProtoMyTestObject {\n string field1 = 1;\n int64 field2 = 2;\n int32 field3 = 3;\n bool field4 = 4;\n repeated string field5 = 5;\n repeated .com.example.springbootstudygradle.model.protobuf.ProtoMyAvroTestInnerList1 field6 = 6;\n repeated .com.example.springbootstudygradle.model.protobuf.ProtoMyTestObject.Field7Entry field7 = 7;\n .google.protobuf.Timestamp field8 = 8;\n\n message Field7Entry {\n option map_entry = true;\n \n string key = 1;\n .com.example.springbootstudygradle.model.protobuf.ProtoMyAvroTestInnerMap1 value = 2;\n }\n}\n"
}
- Kafka Streams 8.1 整合schema registry
/**
 * Kafka Streams demo: for every string record on "input-topic-protobuf", emit a
 * sample {@code MyTestObject} to "test_stream_protobuf_1" serialized as protobuf
 * via the custom {@code MyTestObjectProtobufSerde}.
 */
public static void main(String[] args) {
    final StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> source =
        builder.stream("input-topic-protobuf", Consumed.with(Serdes.String(), Serdes.String()));
    // Incoming value is ignored; each record triggers emission of a fresh sample object.
    KStream<String, MyTestObject> testObject = source.map((k, v) -> new KeyValue<>(k, build()));
    testObject.to("test_stream_protobuf_1", Produced.with(Serdes.String(),
        new MyTestObjectProtobufSerde(myKafkaProtobufSerde())));
    final Topology topology = builder.build();
    Properties properties = new Properties();
    properties.put("application.id", "application_id_test_01");
    // Fixed: bootstrap.servers takes host:port — no "http://" scheme.
    properties.put("bootstrap.servers", "localhost:9092");
    final KafkaStreams streams = new KafkaStreams(topology, properties);
    final CountDownLatch latch = new CountDownLatch(1);
    // Keep the application alive by replacing a crashed stream thread.
    streams.setUncaughtExceptionHandler((exception) -> {
        return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.REPLACE_THREAD;
    });
    Runtime.getRuntime()
        .addShutdownHook(
            new Thread("application_id_test_01_shutdown_hook") {
                @Override
                public void run() {
                    streams.close();
                    latch.countDown();
                }
            });
    try {
        streams.start();
        latch.await(); // block until the shutdown hook releases the latch
    } catch (Exception e) {
        // Fixed: don't exit silently — record why startup/await failed.
        e.printStackTrace();
        System.exit(1);
    }
    System.exit(0);
}
/**
 * Builds a schema-registry-aware protobuf serde for {@link ProtoMyTestObject},
 * configured as a value serde.
 */
public static KafkaProtobufSerde<ProtoMyTestObject> myKafkaProtobufSerde() {
    KafkaProtobufSerde<ProtoMyTestObject> kafkaProtobufSerde = new KafkaProtobufSerde<>();
    // When you want to override serdes explicitly/selectively
    HashMap<String, Object> serdeConfig = new HashMap<>();
    serdeConfig.put("schema.registry.url", "http://localhost:8081");
    serdeConfig.put("skip.known.types", false);
    // Fixed: the original put this entry into an undeclared variable `serdeConfig`
    // while the rest of the method used `map` — that did not compile. All entries
    // now go into the single config map passed to configure().
    // Deserialize into the concrete generated class rather than DynamicMessage.
    serdeConfig.put(KafkaProtobufDeserializerConfig.SPECIFIC_PROTOBUF_VALUE_TYPE,
        ProtoMyTestObject.class.getName());
    kafkaProtobufSerde.configure(serdeConfig, false); // false = configure as value serde
    return kafkaProtobufSerde;
}
/** Builds a fully-populated sample {@code MyTestObject} for the demo pipeline. */
private static MyTestObject build(){
    // One entry for the map field (field7) and one element for the list field (field6).
    HashMap<String, MyAvroTestInnerMap1> field7Entries = new HashMap<>();
    field7Entries.put("k", MyAvroTestInnerMap1.builder().innerMapField1("v").build());
    MyAvroTestInnerList1 listElement =
        MyAvroTestInnerList1.builder().innerListField1("l").build();

    return MyTestObject.builder()
        .field1("s")
        .field2(1)
        .field3(2)
        .field4(true)
        .field5(Arrays.asList("d"))
        .field6(Arrays.asList(listElement))
        .field7(field7Entries)
        .field8(LocalDateTime.now())
        .build();
}
8.2 自定义Serde
/**
 * Serde adapter that lets Kafka Streams work with the domain type
 * {@link MyTestObject} while the wire format stays schema-registry protobuf:
 * it wraps a {@code KafkaProtobufSerde<ProtoMyTestObject>} and converts
 * domain <-> proto with {@code ProtoMyTestObjectMapper} on each (de)serialize.
 */
public class MyTestObjectProtobufSerde implements Serde<MyTestObject> {

    private final KafkaProtobufSerde<ProtoMyTestObject> myKafkaProtobufSerde;
    private final MyTestObjectProtobufSerializer wrappedSerializer;
    private final MyTestObjectProtobufDeserializer wrappedDeserializer;

    public MyTestObjectProtobufSerde(
        KafkaProtobufSerde<ProtoMyTestObject> myKafkaProtobufSerde) {
        this.myKafkaProtobufSerde = myKafkaProtobufSerde;
        this.wrappedSerializer = new MyTestObjectProtobufSerializer(myKafkaProtobufSerde);
        this.wrappedDeserializer = new MyTestObjectProtobufDeserializer(myKafkaProtobufSerde);
    }

    @Override
    public Serializer<MyTestObject> serializer() {
        return wrappedSerializer;
    }

    @Override
    public Deserializer<MyTestObject> deserializer() {
        return wrappedDeserializer;
    }

    /** Serializes domain objects: MyTestObject -> ProtoMyTestObject -> bytes. */
    public static class MyTestObjectProtobufSerializer implements Serializer<MyTestObject> {

        private final Serializer<ProtoMyTestObject> delegate;

        public MyTestObjectProtobufSerializer(
            KafkaProtobufSerde<ProtoMyTestObject> myKafkaProtobufSerde) {
            this.delegate = myKafkaProtobufSerde.serializer();
        }

        @Override
        public byte[] serialize(String topic, MyTestObject myTestObject) {
            // Null payload stays null (tombstone semantics).
            return myTestObject == null
                ? null
                : delegate.serialize(topic, ProtoMyTestObjectMapper.INSTANCE.map(myTestObject));
        }
    }

    /** Deserializes wire bytes: bytes -> ProtoMyTestObject -> MyTestObject. */
    public static class MyTestObjectProtobufDeserializer implements Deserializer<MyTestObject> {

        private final Deserializer<ProtoMyTestObject> delegate;

        public MyTestObjectProtobufDeserializer(
            KafkaProtobufSerde<ProtoMyTestObject> myKafkaProtobufSerde) {
            this.delegate = myKafkaProtobufSerde.deserializer();
        }

        @Override
        public MyTestObject deserialize(String topic, byte[] data) {
            // Null payload stays null (tombstone semantics).
            return data == null
                ? null
                : ProtoMyTestObjectMapper.INSTANCE.map(delegate.deserialize(topic, data));
        }
    }
}
8.3 测试
producer:
confluent-6.2.0 zhhqu$ kafka-console-producer --bootstrap-server localhost:9092 --topic input-topic-protobuf
consumer:
kafka-console-consumer --bootstrap-server localhost:9092 --topic test_stream_protobuf_1 --from-beginning --formatter io.confluent.kafka.formatter.protobuf.ProtobufMessageFormatter --property schema.registry.url=http://localhost:8081