Kafka 3: Asynchronous Send from the Producer



1. Create the producer KafkaAsyncProducer


package org.example.async;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.example.config.BusiConst;

import java.util.Properties;

/**
 * Asynchronous send
 **/
public class KafkaAsyncProducer {
    public static void main(String[] args) {
        //A producer must be given three properties: the broker address list, and the key and value serializers
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.42.111:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        KafkaProducer<String, String> producer = new KafkaProducer<>(properties);
        try {
            ProducerRecord<String, String> record;
            for (int i = 0; i < 4; i++) {
                record = new ProducerRecord<String,String>(BusiConst.HELLO_TOPIC,
                        String.valueOf(i), "Fisher");
                //asynchronous send: pass a Callback that Kafka invokes once the broker responds or the send fails
                producer.send(record, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                        if (e != null) {
                            e.printStackTrace();
                        }
                        if (recordMetadata != null) {
                            System.out.println("offset:" + recordMetadata.offset() + ";partition:" + recordMetadata.partition());
                        }
                    }
                });
            }
        } catch (Exception e) {
            e.printStackTrace();
        }finally {
            producer.close();
        }
    }
}
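Both the producer above and the consumer below read the topic name from BusiConst.HELLO_TOPIC, a constants class that is not shown in this article. A minimal sketch of what such a class could look like (the topic name "hello-kafka" is only a placeholder assumption; use whatever topic actually exists on your broker):

package org.example.config;

/**
 * Shared constants (sketch only; not part of the original article)
 **/
public class BusiConst {
    //hypothetical topic name - replace with the topic created on your broker
    public static final String HELLO_TOPIC = "hello-kafka";
}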

2. Create the consumer HelloKafkaConsumer

package org.example.helloKafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.example.config.BusiConst;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class HelloKafkaConsumer {
    public static void main(String[] args) {
        //A consumer must be given three properties: the broker address list, and the key and value deserializers
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.42.111:9092");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        //a consumer group id is required here because we subscribe to a topic below
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "test1");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(properties);
        //subscribe to topics (more than one is allowed)
        consumer.subscribe(Collections.singletonList(BusiConst.HELLO_TOPIC));
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("主题:"+record.topic()+",分区:"+record.partition()+",偏移量:"+record.offset()+
                            ",key:"+record.key()+",value:"+record.value());
                }
            }
        } finally {
            consumer.close();
        }
    }
}
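Note that the poll loop above never exits on its own, so consumer.close() in the finally block is only reached when an exception is thrown. A common way to stop such a loop cleanly is to call consumer.wakeup() from another thread (for example a JVM shutdown hook), which makes the blocked poll() throw a WakeupException. A rough sketch of how the loop could be adapted:

//Sketch: graceful shutdown for the poll loop via consumer.wakeup()
final Thread mainThread = Thread.currentThread();
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    consumer.wakeup();     //makes the blocked poll() throw WakeupException
    try {
        mainThread.join(); //wait for the poll loop to close the consumer
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}));
try {
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
        //...process records as above...
    }
} catch (org.apache.kafka.common.errors.WakeupException e) {
    //expected during shutdown, nothing to do
} finally {
    consumer.close();
}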

3. Start the consumer first, then start the producer to send 4 messages

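The original screenshot is not available. Judging from the callback in KafkaAsyncProducer, and assuming a freshly created single-partition topic (so all four records land in partition 0 with offsets starting at 0), the producer console would print something along these lines:

offset:0;partition:0
offset:1;partition:0
offset:2;partition:0
offset:3;partition:0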

4. Check the consumer's console output

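The consumer screenshot is also missing. Under the same single-partition assumption, and with the hypothetical topic name hello-kafka, its output would look roughly like:

topic:hello-kafka,partition:0,offset:0,key:0,value:Fisher
topic:hello-kafka,partition:0,offset:1,key:1,value:Fisher
topic:hello-kafka,partition:0,offset:2,key:2,value:Fisher
topic:hello-kafka,partition:0,offset:3,key:3,value:Fisher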