Implementing a Simple Message Queue with Redis Streams


Preface

This post builds a simple message queue on top of the Redis Stream data type with Spring Data Redis, and uses a PowerJob scheduled task to re-deliver messages that were read but never acknowledged.

Environment Dependencies

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
    <groupId>tech.powerjob</groupId>
    <artifactId>powerjob-worker-spring-boot-starter</artifactId>
    <version>4.3.2</version>
</dependency>

Defining the MQ

Read the YAML configuration file

  • Define the configuration file
redis:
  mq:
    streams:
      # stream key name
      - name: stream:message:platform
        groups:
          # consumer group name
          - name: stream:message:group:receive
            # consumer name(s)
            consumers: stream:message:consumer:a
  • Corresponding stream key constants class
public class RedisKeyConstants {

    public static final class StreamKeyConstants {

        public static final String STRING_MESSAGE_KEY = "stream:message:platform";

        public static final String STRING_MESSAGE_GROUP = "stream:message:group:receive";

        public static final String STRING_MESSAGE_CONSUMER = "stream:message:consumer:a";
    }
}
  • Create the configuration properties classes
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;

import java.util.List;

@EnableConfigurationProperties
@Configuration
@ConfigurationProperties(prefix = "redis.mq")
public class RedisMqProperties {

    private List<RedisMqStream> streams;

    public List<RedisMqStream> getStreams() {
        return streams;
    }

    public void setStreams(List<RedisMqStream> streams) {
        this.streams = streams;
    }
}

import java.util.List;

public class RedisMqStream {

    /**
     * stream key
     */
    private String name;
    /**
     * consumer groups
     */
    private List<RedisMqGroup> groups;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public List<RedisMqGroup> getGroups() {
        return groups;
    }

    public void setGroups(List<RedisMqGroup> groups) {
        this.groups = groups;
    }
}
public class RedisMqGroup {

    /**
     * consumer group name
     */
    private String name;

    /**
     * consumer names in the group
     */
    private String[] consumers;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String[] getConsumers() {
        return consumers;
    }

    public void setConsumers(String[] consumers) {
        this.consumers = consumers;
    }
}

Wrapping StringRedisTemplate in a Stream utility class

import org.springframework.data.domain.Range;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.PendingMessage;
import org.springframework.data.redis.connection.stream.PendingMessages;
import org.springframework.data.redis.connection.stream.RecordId;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.util.CollectionUtils;

import java.util.*;
import java.util.stream.Collectors;

public class RedisCacheUtil {

    private final StringRedisTemplate stringRedisTemplate;

    public RedisCacheUtil(StringRedisTemplate stringRedisTemplate) {
        this.stringRedisTemplate = stringRedisTemplate;
    }

    /**
     * Create a consumer group for the given stream key.
     */
    public String createGroup(String key, String group) {
        return stringRedisTemplate.opsForStream().createGroup(key, group);
    }

    /**
     * Append a map record to the stream and return the generated record id.
     */
    public String addMap(String key, Map<Object, Object> value) {
        return Optional.ofNullable(stringRedisTemplate.opsForStream().add(key, value))
                .map(RecordId::getValue).orElse("");
    }

    /**
     * Acknowledge (XACK) the given record ids for a consumer group.
     */
    public Long ack(String key, String group, String... recordIds) {
        return stringRedisTemplate.opsForStream().acknowledge(key, group, recordIds);
    }

    /**
     * Delete (XDEL) records from the stream.
     */
    public Long del(String key, String... recordIds) {
        return stringRedisTemplate.opsForStream().delete(key, recordIds);
    }

    public boolean hasKey(String key) {
        Boolean aBoolean = stringRedisTemplate.hasKey(key);
        return aBoolean != null && aBoolean;
    }

    /**
     * List all pending messages (read but not yet acknowledged) of a consumer group.
     */
    public List<PendingMessage> pending(String key, String group) {
        PendingMessages pending = stringRedisTemplate.opsForStream().pending(key, group, Range.unbounded(), Long.MAX_VALUE);
        return pending.stream().collect(Collectors.toList());
    }

    /**
     * Read the records whose ids fall within the closed range spanned by the given record ids.
     */
    public List<MapRecord<String, Object, Object>> range(String key, Set<RecordId> recordIds) {
        if (CollectionUtils.isEmpty(recordIds)) {
            return new ArrayList<>();
        }

        // sort message ids by timestamp, then by sequence number
        List<String> sortedMessageIds = recordIds.stream().map(RecordId::getValue)
                .sorted(Comparator.comparingLong((String messageId) -> Long.parseLong(messageId.split("-")[0]))
                        .thenComparingLong(messageId -> Long.parseLong(messageId.split("-")[1])))
                .collect(Collectors.toList());

        // closed interval from the smallest to the largest id
        Range<String> range = Range.closed(sortedMessageIds.get(0), sortedMessageIds.get(sortedMessageIds.size() - 1));
        return stringRedisTemplate.opsForStream().range(key, range);
    }
    }
}

Creating the registration class

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.stream.Consumer;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.ReadOffset;
import org.springframework.data.redis.connection.stream.StreamOffset;
import org.springframework.data.redis.stream.StreamListener;
import org.springframework.data.redis.stream.StreamMessageListenerContainer;
import org.springframework.data.redis.stream.Subscription;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

@Configuration
public class RedisStreamConfig {

    private static final Logger LOGGER = LoggerFactory.getLogger(RedisStreamConfig.class);

    private final RedisCacheUtil redisCacheUtil;
    private final RedisMqProperties redisMqProperties;
    private final ThreadPoolTaskExecutor redisMqPoolTaskExecutor;

    public RedisStreamConfig(RedisCacheUtil redisCacheUtil, RedisMqProperties redisMqProperties, ThreadPoolTaskExecutor redisMqPoolTaskExecutor) {
        this.redisCacheUtil = redisCacheUtil;
        this.redisMqProperties = redisMqProperties;
        this.redisMqPoolTaskExecutor = redisMqPoolTaskExecutor;
    }

    @Bean
    public List<Subscription> subscription(RedisConnectionFactory factory, AbstractConsumer abstractConsumer) {
        List<Subscription> resultList = new ArrayList<>();
        // options for the stream message listener container
        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, MapRecord<String, String, String>> options =
                StreamMessageListenerContainer.StreamMessageListenerContainerOptions.builder()
                        // maximum number of messages fetched per poll
                        .batchSize(5)
                        .executor(redisMqPoolTaskExecutor)
                        // poll timeout of 1000 ms: when there are no new messages,
                        // the container issues the next poll after this interval
                        .pollTimeout(Duration.ofMillis(1000))
                        //.errorHandler(...)
                        .build();

        StreamMessageListenerContainer<String, MapRecord<String, String, String>> listenerContainer = StreamMessageListenerContainer.create(factory, options);

        redisMqProperties.getStreams().forEach(stream -> {
            initStream(stream.getName(), stream.getGroups().get(0).getName());

            if (RedisKeyConstants.StreamKeyConstants.STRING_MESSAGE_KEY.equals(stream.getName())) {
                resultList.add(receive(stream.getName(), stream.getGroups().get(0), () -> abstractConsumer, listenerContainer));
            }
        });
        listenerContainer.start();
        return resultList;
    }

    /**
     * Registers the container to receive messages for a specific consumer group and consumer name.
     * The read offset determines where message consumption starts.
     *
     * @param streamName        stream key
     * @param redisMqGroup      consumer group
     * @param supplier          consumer listener supplier
     * @param listenerContainer listener container
     */
    private Subscription receive(String streamName, RedisMqGroup redisMqGroup,
                                 Supplier<StreamListener<String, MapRecord<String, String, String>>> supplier,
                                 StreamMessageListenerContainer<String, MapRecord<String, String, String>> listenerContainer) {
        return listenerContainer.receive(
                // only one consumer per group here (within a group, a message is consumed by at most one consumer)
                Consumer.from(redisMqGroup.getName(), redisMqGroup.getConsumers()[0]),
                StreamOffset.create(streamName, ReadOffset.lastConsumed()),
                supplier.get()
        );
    }


    /**
     * Initialize the stream
     *
     * @param key   stream key
     * @param group consumer group
     */
    private void initStream(String key, String group) {
        boolean hasKey = redisCacheUtil.hasKey(key);
        if (!hasKey) {
            Map<Object, Object> map = new HashMap<>(1);
            map.put("field", "value");
            // create the stream by adding a placeholder record
            String result = redisCacheUtil.addMap(key, map);
            // create the consumer group
            redisCacheUtil.createGroup(key, group);
            // remove the placeholder record used for initialization
            redisCacheUtil.del(key, result);
            LOGGER.info("stream:{}-group:{} initialize success", key, group);
        }
    }
}
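
RedisStreamConfig injects a RedisCacheUtil and a ThreadPoolTaskExecutor named redisMqPoolTaskExecutor, neither of which is declared above. A minimal sketch of how these beans could be registered follows; the class name, pool sizes and thread-name prefix are assumptions to adapt to your project.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
public class RedisMqBeanConfig {

    // expose RedisCacheUtil as a bean so that RedisStreamConfig can inject it
    @Bean
    public RedisCacheUtil redisCacheUtil(StringRedisTemplate stringRedisTemplate) {
        return new RedisCacheUtil(stringRedisTemplate);
    }

    // dedicated thread pool used by the stream listener container;
    // pool sizes and thread-name prefix are placeholder values
    @Bean
    public ThreadPoolTaskExecutor redisMqPoolTaskExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(2);
        executor.setMaxPoolSize(4);
        executor.setQueueCapacity(100);
        executor.setThreadNamePrefix("redis-mq-");
        executor.initialize();
        return executor;
    }
}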

Consumer

import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.stream.StreamListener;

public abstract class AbstractConsumer implements StreamListener<String, MapRecord<String, String, String>> {
}

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.RecordId;

import java.util.Map;

public class MessageConsumer extends AbstractConsumer {
    private static final Logger LOGGER = LoggerFactory.getLogger(MessageConsumer.class);

    private final RedisCacheUtil redisCacheUtil;

    public MessageConsumer(RedisCacheUtil redisCacheUtil) {
        this.redisCacheUtil = redisCacheUtil;
    }

    @Override
    public void onMessage(MapRecord<String, String, String> message) {
        // stream key
        String streamKey = message.getStream();
        // record id
        RecordId recordId = message.getId();
        // message body
        Map<String, String> msg = message.getValue();

        // processing logic
        msg.forEach((k, v) -> {
            if (RedisKeyConstants.StreamKeyConstants.STRING_MESSAGE_GROUP.equals(k)) {
                LOGGER.info("MessageConsumer A ==> streamKey:{}, recordId:{}, msg:{}", streamKey, recordId, msg);
                // business logic goes here
            }
        });

        // after processing, acknowledge and delete the message; the second argument is the consumer group name
        redisCacheUtil.ack(streamKey, RedisKeyConstants.StreamKeyConstants.STRING_MESSAGE_GROUP, recordId.getValue());
        redisCacheUtil.del(streamKey, recordId.getValue());
    }
}
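
The subscription bean above also needs MessageConsumer to be available as a Spring bean, and something has to publish messages to the stream. A minimal sketch is shown below, assuming a hypothetical MessageProducerService; the payload is written under the STRING_MESSAGE_GROUP field key so that it matches the filter in MessageConsumer#onMessage.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.Map;

@Configuration
class MessageConsumerConfig {

    // register the consumer so that it can be injected as an AbstractConsumer
    @Bean
    public AbstractConsumer messageConsumer(RedisCacheUtil redisCacheUtil) {
        return new MessageConsumer(redisCacheUtil);
    }
}

@Service
class MessageProducerService {

    private final RedisCacheUtil redisCacheUtil;

    public MessageProducerService(RedisCacheUtil redisCacheUtil) {
        this.redisCacheUtil = redisCacheUtil;
    }

    // append a message to the stream; the listener container delivers it to MessageConsumer
    public String send(String payload) {
        Map<Object, Object> body = new HashMap<>(1);
        // the field key matches the check in MessageConsumer#onMessage
        body.put(RedisKeyConstants.StreamKeyConstants.STRING_MESSAGE_GROUP, payload);
        return redisCacheUtil.addMap(RedisKeyConstants.StreamKeyConstants.STRING_MESSAGE_KEY, body);
    }
}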

Guarding against failures


import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.PendingMessage;
import org.springframework.data.redis.connection.stream.RecordId;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class RedisPendingMessageResendJob implements BasicProcessor {

    private static final Logger LOGGER = LoggerFactory.getLogger(RedisPendingMessageResendJob.class);

    private final RedisCacheUtil redisCacheUtil;
    private final RedisMqProperties redisMqProperties;

    public RedisPendingMessageResendJob(RedisCacheUtil redisCacheUtil, RedisMqProperties redisMqProperties) {
        this.redisCacheUtil = redisCacheUtil;
        this.redisMqProperties = redisMqProperties;
    }

    @Override
    public ProcessResult process(TaskContext taskContext) throws Exception {

        for (RedisMqStream stream : redisMqProperties.getStreams()) {
            for (RedisMqGroup group : stream.getGroups()) {
                // query unacknowledged (pending) messages of the consumer group
                Map<RecordId, PendingMessage> pendingMessageMap = redisCacheUtil.pending(stream.getName(), group.getName())
                        .stream()
                        // only re-deliver messages that have been pending for more than 2 minutes,
                        // so that messages still being processed are not delivered again
                        .filter(e -> e.getElapsedTimeSinceLastDelivery().getSeconds() > 120).collect(Collectors.toMap(PendingMessage::getId, Function.identity()));

                // read the records in that id range from the stream and keep only the pending ones
                List<MapRecord<String, Object, Object>> pendingRecords = redisCacheUtil.range(stream.getName(), pendingMessageMap.keySet()).stream()
                        // keep only pending messages
                        .filter(e -> pendingMessageMap.containsKey(e.getId()))
                        .collect(Collectors.toList());

                pendingRecords.forEach(record -> {
                    // re-deliver the message as a new record
                    redisCacheUtil.addMap(stream.getName(), record.getValue());

                    // acknowledge the old message
                    redisCacheUtil.ack(stream.getName(), group.getName(), record.getId().getValue());
                    // delete the old message
                    redisCacheUtil.del(stream.getName(), record.getId().getValue());

                    // note: re-adding creates a new record, so the delivery count is not incremented; it stays at 1 here
                    LOGGER.info("[RedisPendingMessageResendJob] message recordId:{} pending for {} seconds, delivery count:{}, re-delivered",
                            record.getId().getValue(),
                            pendingMessageMap.get(record.getId()).getElapsedTimeSinceLastDelivery().getSeconds(),
                            pendingMessageMap.get(record.getId()).getTotalDeliveryCount());
                });
            }
        }
        return new ProcessResult(true);
    }
}
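
Since RedisPendingMessageResendJob implements PowerJob's BasicProcessor, it only needs to be exposed as a Spring bean; the schedule itself (for example, once a minute) is configured in the PowerJob console, which typically references the processor by its fully-qualified class name. A minimal registration sketch, assuming the beans defined earlier:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ResendJobConfig {

    // expose the processor as a Spring bean so that the PowerJob worker can locate it
    @Bean
    public RedisPendingMessageResendJob redisPendingMessageResendJob(RedisCacheUtil redisCacheUtil,
                                                                     RedisMqProperties redisMqProperties) {
        return new RedisPendingMessageResendJob(redisCacheUtil, redisMqProperties);
    }
}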