1.创建自动配置类
@Configuration
@EnableConfigurationProperties(MessageQueueProperties.class)
public class MessageQueueAutoConfiguration {
@Bean(name = "kafkaMessageQueue")
@ConditionalOnClass(KafkaProducer.class)
@ConditionalOnProperty(value = "mq.kafka.enableProducer", havingValue = "true")
public MessageQueue kafkaMessageQueue(KafkaProducer<String, String> kafkaProducer) {
return new KafkaMessageQueue(kafkaProducer);
}
@Bean(name = "rocketMqMessageQueue")
@ConditionalOnClass(DefaultMQProducer.class)
@ConditionalOnProperty(value = "mq.rocketmq.enableProducer", havingValue = "true")
public MessageQueue rocketMessageQueue(DefaultMQProducer defaultMQProducer) {
return new RocketMessageQueue(defaultMQProducer);
}
@Bean
@ConditionalOnMissingBean
@ConditionalOnProperty(value = "mq.rocketmq.enableProducer", havingValue = "true")
public DefaultMQProducer defaultMQProducer(MessageQueueProperties messageQueueProperties) throws MQClientException {
TransactionMQProducer transactionMQProducer =
new TransactionMQProducer(messageQueueProperties.getRocketmq().getProducerGroup(),
new AclClientRPCHook(new SessionCredentials(messageQueueProperties.getRocketmq().getAccessKey(),
messageQueueProperties.getRocketmq().getSecretKey())));
transactionMQProducer.setNamesrvAddr(messageQueueProperties.getRocketmq().getServer());
transactionMQProducer.setSendMsgTimeout(messageQueueProperties.getRocketmq().getSendMsgTimeout());
transactionMQProducer.start();
return transactionMQProducer;
}
@Bean
@ConditionalOnMissingBean(name = "kafkaProperties")
@ConditionalOnProperty(value = "mq.kafka.enableProducer", havingValue = "true")
public Properties kafkaProperties(MessageQueueProperties messageQueueProperties) {
Properties properties = new Properties();
properties.put("bootstrap.servers", messageQueueProperties.getKafka().getServer());
properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("acks", "1");
properties.put("max.in.flight.requests.per.connection", "1");
properties.put("retries", Integer.MAX_VALUE);
properties.put("batch.size", 16 * 1024);
properties.put("linger.ms", 10);
properties.put("buffer.memory", 32 * 1024 * 1024);
return properties;
}
@Bean
@ConditionalOnMissingBean
@ConditionalOnClass(KafkaProducer.class)
@ConditionalOnProperty(value = "mq.kafka.enableProducer", havingValue = "true")
public KafkaProducer<String, String> kafkaProducer(@Qualifier("kafkaProperties") Properties properties) {
return new KafkaProducer<>(properties);
}
@Bean
@ConditionalOnClass(KafkaConsumer.class)
@ConditionalOnProperty(value = mq.kafka.enableConsumer", havingValue = "true")
public KafkaConsumer kafkaConsumer(MessageQueueProperties messageQueueProperties, List<MessageListener> listeners) {
List<MessageListener> listenersResult =
listeners.stream().filter(this::matchKafka).collect(Collectors.toList());
return new KafkaConsumer(messageQueueProperties, listenersResult);
}
@Bean
@ConditionalOnClass(RocketMQConsumer.class)
@ConditionalOnProperty(value = "mq.rocketmq.enableConsumer", havingValue = "true")
public RocketMQConsumer rocketMqConsumer(MessageQueueProperties messageQueueProperties,
List<MessageListener> listeners) throws Exception {
List<MessageListener> listenersResult =
listeners.stream().filter(this::matchRocketMQ).collect(Collectors.toList());
return new RocketMQConsumer(messageQueueProperties, listenersResult);
}
private boolean matchRocketMQ(MessageListener messageListener) {
Class<? extends MessageListener> clazz = messageListener.getClass();
if (!clazz.isAnnotationPresent(MessageQueueListener.class)) {
return false;
}
MessageQueueListener annotation = clazz.getAnnotation(MessageQueueListener.class);
return annotation.type().equals(MessageQueueType.ROCKETMQ);
}
private boolean matchKafka(MessageListener messageListener) {
Class<? extends MessageListener> clazz = messageListener.getClass();
if (!clazz.isAnnotationPresent(MessageQueueListener.class)) {
return false;
}
MessageQueueListener annotation = clazz.getAnnotation(MessageQueueListener.class);
return annotation.type().equals(MessageQueueType.KAFKA);
}
}
2.创建MQ类型枚举
/**
 * The message-queue backends a {@code MessageQueueListener} can bind to.
 * Constant order is part of the contract — do not reorder.
 */
public enum MessageQueueType {
    /** No backend selected (annotation default). */
    UNKNOWN,
    /** Apache Kafka. */
    KAFKA,
    /** Apache RocketMQ. */
    ROCKETMQ
}
3.创建mq配置属性
/**
 * Configuration properties bound from the {@code mq.*} prefix.
 * Either nested section may be null when its prefix is absent from config.
 */
@Data
@ConfigurationProperties(prefix = "mq")
public class MessageQueueProperties {

    // Bound from mq.kafka.*; null when that section is not configured.
    private Kafka kafka;
    // Bound from mq.rocketmq.*; null when that section is not configured.
    private RocketMQ rocketmq;

    /** Kafka settings ({@code mq.kafka.*}). */
    @Data
    @AllArgsConstructor
    @NoArgsConstructor
    public static class Kafka {
        private Boolean enableProducer;
        private Boolean enableConsumer;
        // bootstrap.servers value, e.g. "host1:9092,host2:9092"
        private String server;
    }

    /** RocketMQ settings ({@code mq.rocketmq.*}). */
    @Data
    @AllArgsConstructor
    @NoArgsConstructor
    public static class RocketMQ {
        private Boolean enableProducer;
        private Boolean enableConsumer;
        // Name-server address, e.g. "host:9876"
        private String server;
        private String producerGroup = "default-group";
        // Fixed: previously had no default, so an unset property produced a
        // 0 ms send timeout. 3000 ms matches the RocketMQ client default.
        private int sendMsgTimeout = 3000;
        private String accessKey;
        private String secretKey;
    }
}
4.创建消息监听注解
/**
 * Marks a {@code MessageListener} implementation as a message-queue subscriber
 * and declares which backend, topic, group, and tags it consumes.
 * Meta-annotated with {@code @Component}, so annotated classes are picked up
 * by component scanning and registered as Spring beans.
 */
@Component
@Documented
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface MessageQueueListener {
/**
 * @return the topic to subscribe to
 */
String topic();
/**
 * Consumer group the listener joins.
 *
 * @return the consumer group name
 */
String consumerGroup() default "default-consumer-group";
/**
 * Backend this listener consumes from (KAFKA or ROCKETMQ); listeners left at
 * UNKNOWN are matched by neither consumer.
 *
 * @return the queue type
 */
MessageQueueType type() default MessageQueueType.UNKNOWN;
/**
 * Tag filter expression; only effective for RocketMQ.
 *
 * @return the tags expression, {@code "*"} matches all
 */
String tags() default "*";
}
5.Producer
Kafka
/**
 * Kafka implementation of {@link MessageQueue}. Sends are asynchronous; the
 * outcome is only logged in the producer callback, so callers get no
 * synchronous failure signal.
 */
@Slf4j
@AllArgsConstructor
public class KafkaMessageQueue implements MessageQueue {

    private KafkaProducer<String, String> kafkaProducer;

    @Override
    public void send(String topic, String message) {
        send(topic, message, null);
    }

    @Override
    public void send(String topic, String message, String tags) {
        send(topic, message, tags, null);
    }

    @Override
    public void send(String topic, String message, String tags, String key) {
        send(topic, message, tags, key, -1);
    }

    /**
     * Sends a message; {@code tags} and {@code delayTimeLevel} are RocketMQ-only
     * features and are merely logged as warnings here.
     *
     * @param key used as the Kafka partition key when non-blank
     */
    @Override
    public void send(String topic, String message, String tags, String key, Integer delayTimeLevel) {
        if (StringUtils.isNotBlank(tags)) {
            log.warn("Kafka not support parameter: tags -> {}", tags);
        }
        // Fixed: guard against null before unboxing — a caller passing null for
        // "no delay" previously triggered a NullPointerException here.
        if (delayTimeLevel != null && delayTimeLevel > 0) {
            log.warn("Kafka not support delay feature: delayTimeLevel -> {}", delayTimeLevel);
        }
        ProducerRecord<String, String> record;
        if (StringUtils.isNotBlank(key)) {
            record = new ProducerRecord<>(topic, key, message);
        } else {
            record = new ProducerRecord<>(topic, message);
        }
        kafkaProducer.send(record, (metadata, exception) -> {
            if (exception == null) {
                log.info("发送消息到Kafka成功");
            } else {
                // Fixed: do not throw from the callback — it runs on the producer
                // I/O thread, so the exception never reaches the caller and can
                // disrupt the sender thread. Log and move on.
                log.error("发送消息到Kafka失败:", exception);
            }
        });
    }
}
消息队列接口
public interface MessageQueue {
/**
 * Sends a message.
 *
 * @param topic topic
 * @param message message body
 * @throws MessageQueueException if sending fails
 */
void send(String topic, String message) throws MessageQueueException;
/**
 * Sends a message.
 *
 * @param topic topic
 * @param message message body
 * @param tags RocketMQ only; ignored by Kafka
 * @throws MessageQueueException if sending fails
 */
void send(String topic, String message, String tags) throws MessageQueueException;
/**
 * Sends a message.
 *
 * @param topic topic
 * @param message message body
 * @param tags RocketMQ only; ignored by Kafka
 * @param key unique message key for RocketMQ; partition key for Kafka
 * @throws MessageQueueException if sending fails
 */
void send(String topic, String message, String tags, String key) throws MessageQueueException;
/**
 * Sends a message.
 *
 * @param topic topic
 * @param message message body
 * @param tags RocketMQ only; ignored by Kafka
 * @param key unique message key for RocketMQ; partition key for Kafka
 * @param delayTimeLevel RocketMQ delay level; not supported by Kafka
 * @throws MessageQueueException if sending fails
 */
void send(String topic, String message, String tags, String key, Integer delayTimeLevel) throws MessageQueueException;
}
RocketMq
/**
 * RocketMQ implementation of {@link MessageQueue}; sends synchronously and
 * treats any status other than SEND_OK as a failure.
 */
@Slf4j
@AllArgsConstructor
public class RocketMessageQueue implements MessageQueue {

    private DefaultMQProducer producer;

    @Override
    public void send(String topic, String message) throws MessageQueueException {
        send(topic, message, null);
    }

    @Override
    public void send(String topic, String message, String tags) throws MessageQueueException {
        send(topic, message, tags, null);
    }

    @Override
    public void send(String topic, String message, String tags, String key) throws MessageQueueException {
        send(topic, message, tags, key, -1);
    }

    /**
     * Sends synchronously; a non-OK broker status or any client exception is
     * surfaced as a {@link MessageQueueException}.
     */
    @Override
    public void send(String topic, String message, String tags, String key, Integer delayTimeLevel) throws MessageQueueException {
        Message msg = new Message(topic, tags, key, message.getBytes(StandardCharsets.UTF_8));
        // Fixed: null-safe — a null delayTimeLevel previously NPE'd on unboxing.
        if (delayTimeLevel != null && delayTimeLevel > 0) {
            msg.setDelayTimeLevel(delayTimeLevel);
        }
        SendResult send;
        try {
            send = producer.send(msg);
        } catch (Exception e) {
            throw new MessageQueueException(e);
        }
        // Fixed: the status-check exception used to be thrown inside the same
        // try block and immediately re-caught and double-wrapped by
        // catch (Exception); the check now runs after the try.
        if (SendStatus.SEND_OK != send.getSendStatus()) {
            throw new MessageQueueException(send.getSendStatus().toString());
        }
        log.info("发送MQ消息成功, message={}", message);
    }
}
6.Consumer
消息确认
/**
 * Callback handed to a {@code MessageListener} so it can commit the offset of
 * the message it just processed. Annotated {@code @FunctionalInterface} because
 * it is implemented as a lambda throughout this module.
 */
@FunctionalInterface
public interface Acknowledgement {
/**
 * Commits the offset of the current message.
 */
void ack();
}
消息监听接口
/**
 * Contract for message-queue listeners registered via
 * {@code @MessageQueueListener}.
 */
public interface MessageListener {

    /**
     * Handles a message without manual offset control; delegates to the
     * two-argument overload with a no-op acknowledgement.
     *
     * @param message the message body
     */
    default void onMessage(String message) {
        Acknowledgement noop = () -> { };
        onMessage(message, noop);
    }

    /**
     * Handles a message with manual offset control. Implementations MUST call
     * {@code acknowledgement.ack()} once the message has been processed,
     * otherwise the offset is never committed.
     *
     * @param message the message body
     * @param acknowledgement commits the offset; always invoke after processing
     */
    void onMessage(String message, Acknowledgement acknowledgement);
}
Kafka
@Slf4j
public class KafkaConsumer {
private final List<MessageListener> messageListeners;
private final MessageQueueProperties messageQueueProperties;
public KafkaConsumer(MessageQueueProperties messageQueueProperties, List<MessageListener> messageListeners) {
this.messageQueueProperties = messageQueueProperties;
this.messageListeners = messageListeners;
initConsumer();
}
private void initConsumer() {
Properties properties = new Properties();
properties.put("bootstrap.servers", messageQueueProperties.getKafka().getServer());
properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
properties.put("enable.auto.commit", "false");
properties.put("auto.commit.interval.ms", "1000");
properties.put("auto.offset.reset", "latest");
if (messageListeners == null || messageListeners.isEmpty()) {
return;
}
for (MessageListener messageListener : messageListeners) {
Processor processor = new Processor(messageListener, properties);
processor.start();
}
}
private String topic(MessageListener messageListener){
Class<? extends MessageListener> clazz = messageListener.getClass();
MessageQueueListener annotation = clazz.getAnnotation(MessageQueueListener.class);
return annotation.topic();
}
private boolean isRun() {
return true;
}
private class Processor extends Thread {
private final MessageListener messageListener;
private final org.apache.kafka.clients.consumer.KafkaConsumer<String, String> consumer;
public Processor(MessageListener messageListener, Properties properties) {
this.messageListener = messageListener;
Iterator<Map.Entry<Object, Object>> iterator = properties.entrySet().iterator();
Properties copyProperties = new Properties();
while (iterator.hasNext()){
Map.Entry<Object, Object> next = iterator.next();
copyProperties.put(next.getKey(),next.getValue());
}
copyProperties.put("group.id", "business-group");
this.consumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(copyProperties);
setDaemon(true);
String topic = topic(messageListener);
setName("Kafka-Processor-Topic-" + topic);
this.consumer.subscribe(Collections.singleton(topic));
}
@Override
public void run() {
while (isRun()) {
try {
ConsumerRecords<String, String> records = this.consumer.poll(Duration.ofSeconds(1));
if (records == null || records.isEmpty()) {
continue;
}
// default max poll 500 message
Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(500);
for (ConsumerRecord<String, String> record : records) {
String value = record.value();
offsets.put(new TopicPartition(record.topic(), record.partition()),
new OffsetAndMetadata(record.offset() + 1));
messageListener.onMessage(value, () -> this.consumer.commitSync(offsets));
offsets.clear();
}
} catch (Exception e) {
log.error("消费消息产生错误:" + e);
}
}
}
}
}
RocketMq
/**
 * Registers one RocketMQ push consumer per {@code MessageListener}, configured
 * from the listener's {@code @MessageQueueListener} annotation.
 */
@Slf4j
public class RocketMQConsumer {

    private final List<MessageListener> messageListeners;
    private final MessageQueueProperties messageQueueProperties;

    public RocketMQConsumer(MessageQueueProperties messageQueueProperties, List<MessageListener> messageListeners) throws Exception {
        this.messageQueueProperties = messageQueueProperties;
        this.messageListeners = messageListeners;
        initConsumer();
    }

    /**
     * Creates, subscribes, and starts a DefaultMQPushConsumer for each listener.
     * Listeners reaching this class were pre-filtered by the auto-configuration,
     * so the annotation is expected to be present.
     */
    private void initConsumer() throws Exception {
        for (MessageListener listener : messageListeners) {
            Class<? extends MessageListener> clazz = listener.getClass();
            MessageQueueListener annotation = clazz.getAnnotation(MessageQueueListener.class);
            DefaultMQPushConsumer consumer = new DefaultMQPushConsumer(null, annotation.consumerGroup(),
                    new AclClientRPCHook(new SessionCredentials(messageQueueProperties.getRocketmq().getAccessKey(),
                            messageQueueProperties.getRocketmq().getSecretKey())));
            consumer.setNamesrvAddr(messageQueueProperties.getRocketmq().getServer());
            consumer.subscribe(annotation.topic(), annotation.tags());
            consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
                for (MessageExt messageExt : msgs) {
                    // Fixed: decode explicitly as UTF-8 to match the producer
                    // (RocketMessageQueue encodes with StandardCharsets.UTF_8);
                    // the no-arg String(byte[]) used the platform charset.
                    String message = new String(messageExt.getBody(), java.nio.charset.StandardCharsets.UTF_8);
                    // Single-arg overload: ack is a no-op, RocketMQ manages the
                    // offset via the returned CONSUME_SUCCESS status.
                    listener.onMessage(message);
                }
                return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
            });
            consumer.start();
        }
    }
}
7.使用
kafka消息监听
/**
 * Demo Kafka listener: prints each message, then acknowledges it so the
 * consumer commits the offset.
 */
@MessageQueueListener(topic = "aaa", type = MessageQueueType.KAFKA)
public class KafkaDemoMQListener implements MessageListener {

    @Override
    public void onMessage(String message, Acknowledgement acknowledgement) {
        try {
            System.out.println("开始消费:" + message);
            // Commit the offset for this record.
            acknowledgement.ack();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
rocketmq消息监听
@MessageQueueListener(topic = "aaa", type = MessageQueueType.ROCKETMQ, consumerGroup = default-group")
public class RocketMQDemoMQListener implements MessageListener {
@Override
public void onMessage(String message, Acknowledgement acknowledgement) {
System.out.println(message);
}
}
发送消息
注入MessageQueue,调用send即可