Kafka客户端

183 阅读1分钟

依赖

slf4j依赖并非必须的,不导入也不会影响生产者与消费者之间的通讯。
只不过kafka的日志依赖于org.slf4j,不引入的话无法在客户端看到kafka打印的日志信息,并且会收到slf4j的警告信息。

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.1.0</version>
</dependency>
 
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-simple</artifactId>
    <version>1.7.25</version>
    <scope>compile</scope>
</dependency>

生产者

import java.util.Properties;
import java.util.Scanner;
 
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
 
/**
 * @author TongHao on 2021/1/7
 */
public class KafkaProducerAnalysis {

    /** Kafka broker address list; separate multiple brokers with commas. */
    public static final String brokerList = "******";
    /** Topic this demo publishes to. */
    public static final String topic = "kafka_demo_analysis";

    /**
     * Builds the producer configuration: bootstrap servers, String
     * serializers for key and value, a client id, and a retry count.
     *
     * @return the populated {@link Properties} for {@link KafkaProducer}
     */
    public static Properties initConfig() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.CLIENT_ID_CONFIG, "0");
        properties.put(ProducerConfig.RETRIES_CONFIG, 10); // retries on transient send failures
        return properties;
    }

    /**
     * Reads up to 20 lines from stdin and sends each one as a record,
     * printing {@code topic-partition-offset} from the async callback on
     * success.
     */
    public static void main(String[] args) {
        Properties properties = KafkaProducerAnalysis.initConfig();
        // try-with-resources guarantees the producer is flushed/closed and
        // the Scanner is released even if a send throws. The original never
        // closed the Scanner and skipped producer.close() on an uncaught
        // exception.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties);
             Scanner scanner = new Scanner(System.in)) {
            // hasNextLine() guard: without it, an exhausted stdin makes
            // nextLine() throw NoSuchElementException on every remaining
            // iteration.
            for (int i = 0; i < 20 && scanner.hasNextLine(); i++) {
                ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, scanner.nextLine());
                producer.send(producerRecord, (metadata, exception) -> {
                    if (exception != null) {
                        //TODO replace with real error handling/logging
                        exception.printStackTrace();
                    } else {
                        System.out.println(metadata.topic() + "-" + metadata.partition() + "-" + metadata.offset());
                    }
                });
            }
        }
    }
}

消费者

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
 
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.serialization.StringDeserializer;
 
/**
 * @author TongHao on 2021/1/7
 */
public class KafkaConsumerAnalysis {
    /** Kafka broker address list; separate multiple brokers with commas. */
    public static final String brokerList = "*****";
    /** Topic this demo subscribes to. */
    public static final String topic = "kafka_demo_analysis";
    /** Consumer-group id. */
    public static final String groupId = "kafka-learner";

    /**
     * Builds the consumer configuration: bootstrap servers, String
     * deserializers for key and value, and the consumer group.
     *
     * @return the populated {@link Properties} for {@link KafkaConsumer}
     */
    public static Properties initConfig() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId); // consumer group
        return properties;
    }

    /**
     * Polls the subscribed topic forever, printing each record. Installs a
     * shutdown hook so the consumer leaves the group cleanly on exit.
     */
    public static void main(String[] args) {
        Properties properties = KafkaConsumerAnalysis.initConfig();
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        /* Subscribe to the topic. */
        consumer.subscribe(Collections.singletonList(topic));
        // wakeup() is the only thread-safe KafkaConsumer method; calling it
        // from the shutdown hook makes the blocked poll() below throw
        // WakeupException so we can close the consumer cleanly (commit
        // offsets, trigger an immediate rebalance) instead of dying mid-poll.
        Runtime.getRuntime().addShutdownHook(new Thread(consumer::wakeup));
        try {
            while (true) {
                ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(1000));
                consumerRecords.forEach(System.out::println);
            }
        } catch (WakeupException e) {
            // Expected on shutdown; nothing to do.
        } finally {
            consumer.close();
        }
    }
}

结果打印