Spring Boot Integration with Kafka: Consumer Mode
This article covers integrating Kafka into a Spring Boot application, including how to wire up SASL authentication.
The Spring Boot version is 2.1.0.RELEASE.
The spring-kafka version is 2.2.0.RELEASE (the .RELEASE suffix belongs to the Spring artifact, not to Apache Kafka itself).
While setting this up I ran into a "disconnected" error where the service could not reach the broker. Search results at the time suggested a version mismatch between Spring Boot and spring-kafka, which sent me down several dead ends, but it later turned out that was not the cause.
pom file
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>2.2.0.RELEASE</version>
</dependency>
In fact, from Kafka 2.0 onward you can put all of the Kafka settings in application.properties and let Spring Boot auto-configure them. I hit some trouble at this step and tried many workarounds before discovering the configuration itself was not the problem, so if you are on a version after Kafka 2.0, properties placed in application.properties will be loaded automatically.
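For reference, if auto-configuration works in your setup, the equivalent application.properties would look roughly like the sketch below (the spring.kafka.properties.* entries are passed through verbatim to the Kafka client; addresses and credentials are placeholders):
spring.kafka.bootstrap-servers=ip:port
spring.kafka.consumer.group-id=NOTICE_91
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.properties.security.protocol=SASL_PLAINTEXT
spring.kafka.properties.sasl.mechanism=SCRAM-SHA-256
spring.kafka.properties.sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username="xxxx" password="xxx";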
In the end I went with a dedicated configuration class that loads its own properties file. The code is below:
KafkaConsumerConfig
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import java.util.HashMap;
import java.util.Map;
/**
 * Consumer configuration.
 *
 * @author huang createTime:2020/11/16 13:57
 */
@Configuration
@PropertySource("classpath:config/consumer.properties")
public class KafkaConsumerConfig {
    // Alternative left in for reference: read connection details from a system-parameter service
    //@Autowired
    //private BaseSysParamService baseSysParamService;
    @Value("${kafka.bootstrap-servers}")
    private String servers;
    @Value("${kafka.consumer.enable-auto-commit}")
    private boolean enableAutoCommit;
    @Value("${kafka.consumer.session.timeout}")
    private String sessionTimeout;
    @Value("${kafka.consumer.auto-commit-interval}")
    private String autoCommitInterval;
    @Value("${kafka.consumer.group-id}")
    private String groupId;
    @Value("${kafka.consumer.auto-offset-reset}")
    private String autoOffsetReset;
    @Value("${kafka.consumer.concurrency}")
    private int concurrency;
    /** Maximum number of records returned by one call to poll(); the default is 500 */
    @Value("${kafka.consumer.max-poll-records}")
    private int maxPollRecords;
    @Value("${kafka.consumer.sasl-jaas-config}")
    private String kafkaConsumerSASLJaasConfig;
    // Alternative left in for reference: point the JVM at a JAAS file instead of sasl.jaas.config
    // static {
    //     String path = Thread.currentThread().getContextClassLoader().getResource("config/consumer_jaas.conf").getPath();
    //     System.setProperty("java.security.auth.login.config", path);
    // }
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // Number of concurrent consumer threads
        factory.setConcurrency(concurrency);
        // Batch listening: listener methods must then accept a List of records
        factory.setBatchListener(true);
        factory.getContainerProperties().setPollTimeout(1500);
        return factory;
    }
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }
    @Bean
    public Map<String, Object> consumerConfigs() {
        // Alternative left in for reference: override servers/credentials from a system-parameter service
        //String kafkaIp = baseSysParamService.getValByName("third.kafka.ip");
        //if (StringUtils.isNotBlank(kafkaIp)){
        //    servers = kafkaIp;
        //}
        //String kafkaSASL = baseSysParamService.getValByName("third.kafka.sasl");
        //if (StringUtils.isNotBlank(kafkaSASL)){
        //    kafkaConsumerSASLJaasConfig = kafkaSASL;
        //}
        Map<String, Object> props = new HashMap<>();
        // Consumer settings
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords); // batch consumption cap
        // SASL username/password authentication
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "SCRAM-SHA-256");
        props.put("sasl.jaas.config", kafkaConsumerSASLJaasConfig);
        return props;
    }
    @Bean
    public KafkaProperties.Listener listener() {
        // Note: nothing else in this class references this bean
        return new KafkaProperties.Listener();
    }
}
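As an aside, the commented-out static block above sets java.security.auth.login.config so that the JVM reads credentials from a JAAS file instead of the sasl.jaas.config property. If you prefer that route, config/consumer_jaas.conf would contain something like this sketch (credentials are placeholders):
KafkaClient {
    org.apache.kafka.common.security.scram.ScramLoginModule required
    username="xxxx"
    password="xxx";
};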
consumer.properties
# separate multiple addresses with commas
kafka.bootstrap-servers=ip:port
kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
kafka.consumer.group-id=NOTICE_91
kafka.consumer.auto-offset-reset=earliest
kafka.consumer.enable-auto-commit=true
kafka.consumer.auto-commit-interval=1000
kafka.consumer.session.timeout=6000
kafka.consumer.concurrency=10
kafka.consumer.max-poll-records=500
kafka.consumer.sasl-jaas-config=org.apache.kafka.common.security.scram.ScramLoginModule required username="xxxx" password="xxx";
Pay special attention here: the login module named in sasl-jaas-config must match the sasl.mechanism value set in consumerConfigs(); if you change one, change the other. Copy and paste these values rather than typing them by hand; a typo in this pair cost me two full days.
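For example, if your broker used the PLAIN mechanism instead of SCRAM, both sides would change together, roughly like this (a sketch; PlainLoginModule ships with the Kafka client):
props.put("sasl.mechanism", "PLAIN");   // in KafkaConsumerConfig
kafka.consumer.sasl-jaas-config=org.apache.kafka.common.security.plain.PlainLoginModule required username="xxxx" password="xxx";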
MyConsumer
import com.alibaba.fastjson.JSONObject;
import com.asiainfo.recenterkafka.modual.tradeModual.service.TradeService;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import lombok.AllArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * @author lis
 * @date 2022/1/27
 */
@Component
@AllArgsConstructor
public class MyConsumer {
    private final Logger log = LoggerFactory.getLogger(MyConsumer.class);
    private final TradeService tradeService;

    // The container factory enables batch listening, so the method receives a List of records
    @KafkaListener(topics = "topic")
    public void onMessage(List<String> records) {
        for (String record : records) {
            /* Parse the raw record first */
            JSONObject jsonObject = JSONObject.parseObject(record);
            log.info("Parsed payload: " + jsonObject);
            String orderId = String.valueOf(IdWorker.getId());
            if (jsonObject.containsKey("ORDER_ID") && StringUtils.isNotBlank(String.valueOf(jsonObject.get("ORDER_ID")))) {
                orderId = String.valueOf(jsonObject.get("ORDER_ID"));
            }
            JSONObject param = new JSONObject();
            param.put("id", IdWorker.getId());
            param.put("orderId", orderId);
            param.put("logType", "RSP");
            param.put("entityName", "TRADEFROMKAFKA");
            param.put("msg", jsonObject);
            int result = tradeService.insertLogByKafka(param);
            if (result == 0) {
                log.error("Failed to insert data for orderId " + orderId);
            }
        }
    }
}
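If you also need partition and offset metadata, the listener can receive ConsumerRecord objects instead of plain strings. A minimal sketch against the same batch container factory (add an import for org.apache.kafka.clients.consumer.ConsumerRecord; the topic name is a placeholder):
@KafkaListener(topics = "topic")
public void onMessage(List<ConsumerRecord<String, String>> records) {
    for (ConsumerRecord<String, String> record : records) {
        // Each record carries its partition and offset alongside the value
        log.info("partition=" + record.partition() + ", offset=" + record.offset() + ", value=" + record.value());
    }
}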
I hope this article is helpful.