1. Dependencies
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
    <groupId>com.google.guava</groupId>
    <artifactId>guava</artifactId>
    <version>19.0</version>
</dependency>
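The Lettuce connection pool configured in step 2 (GenericObjectPoolConfig / LettucePoolingClientConfiguration) also needs Apache commons-pool2 on the classpath, which spring-boot-starter-data-redis does not pull in by itself. A typical addition (the version is managed by Spring Boot's dependency management; add an explicit <version> if your build does not inherit it):
<dependency>
    <groupId>org.apache.commons</groupId>
    <artifactId>commons-pool2</artifactId>
</dependency>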
2. Configure RedisConfig
package com.my.equipment.config.RedisCluster;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.google.common.hash.Funnel;
import com.my.equipment.config.BloomFilter.BloomFilterHelper;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.env.Environment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.data.redis.connection.RedisClusterConfiguration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.lettuce.LettuceClientConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory;
import org.springframework.data.redis.connection.lettuce.LettucePoolingClientConfiguration;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class RedisConfig {

    @Autowired
    private Environment environment;

    // Connection pool settings, bound from spring.redis.lettuce.pool.*
    @Bean
    @Primary
    @ConfigurationProperties(prefix = "spring.redis.lettuce.pool")
    public GenericObjectPoolConfig redisPool() {
        return new GenericObjectPoolConfig();
    }

    // Cluster topology, built from spring.redis.cluster.nodes and spring.redis.password
    @Primary
    @Bean("redisClusterConfig")
    public RedisClusterConfiguration redisClusterConfig() {
        Map<String, Object> source = new HashMap<>();
        source.put("spring.redis.cluster.nodes", environment.getProperty("spring.redis.cluster.nodes"));
        RedisClusterConfiguration redisClusterConfiguration = new RedisClusterConfiguration(new MapPropertySource("RedisClusterConfiguration", source));
        redisClusterConfiguration.setPassword(environment.getProperty("spring.redis.password"));
        return redisClusterConfiguration;
    }

    @Bean("lettuceConnectionFactory")
    @Primary
    public LettuceConnectionFactory lettuceConnectionFactory(GenericObjectPoolConfig redisPool, @Qualifier("redisClusterConfig") RedisClusterConfiguration redisClusterConfig) {
        LettuceClientConfiguration clientConfiguration = LettucePoolingClientConfiguration.builder().poolConfig(redisPool).build();
        return new LettuceConnectionFactory(redisClusterConfig, clientConfiguration);
    }

    @Bean("redisTemplate")
    @Primary
    public RedisTemplate<String, Object> redisTemplate(@Qualifier("lettuceConnectionFactory") RedisConnectionFactory redisConnectionFactory) {
        return getRedisTemplate(redisConnectionFactory);
    }

    // String keys, Jackson JSON values
    private RedisTemplate<String, Object> getRedisTemplate(RedisConnectionFactory factory) {
        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(factory);
        Jackson2JsonRedisSerializer<Object> jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer<>(Object.class);
        ObjectMapper om = new ObjectMapper();
        om.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
        // deprecated in newer Jackson versions in favour of activateDefaultTyping, but still functional
        om.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
        jackson2JsonRedisSerializer.setObjectMapper(om);
        StringRedisSerializer stringRedisSerializer = new StringRedisSerializer();
        template.setKeySerializer(stringRedisSerializer);
        template.setHashKeySerializer(stringRedisSerializer);
        template.setValueSerializer(jackson2JsonRedisSerializer);
        template.setHashValueSerializer(jackson2JsonRedisSerializer);
        template.afterPropertiesSet();
        return template;
    }

    // Bloom filter sized for 1,000,000 expected insertions with a 1% false-positive rate
    @Bean
    public BloomFilterHelper<Integer> initBloomFilterHelper() {
        return new BloomFilterHelper<>((Funnel<Integer>) (from, into) -> into.putInt(from), 1000000, 0.01);
    }
}
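For reference, a minimal sketch of the properties this configuration reads; the hosts and password are placeholders, so adjust them to your cluster. Note that the pool keys bind directly onto GenericObjectPoolConfig, so they follow its property names:
spring.redis.cluster.nodes=127.0.0.1:7001,127.0.0.1:7002,127.0.0.1:7003
spring.redis.password=yourPassword
spring.redis.lettuce.pool.max-total=8
spring.redis.lettuce.pool.max-idle=8
spring.redis.lettuce.pool.min-idle=0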
3. Configure BloomFilter
package com.my.equipment.config.BloomFilter;
import com.google.common.base.Preconditions;
import com.google.common.hash.Funnel;
import com.google.common.hash.Hashing;

public class BloomFilterHelper<T> {
    /**
     * Number of hash functions.
     */
    private int numHashFunctions;
    /**
     * Length of the bit array.
     */
    private int bitSize;
    /**
     * Converts an input of arbitrary type T into Java primitive data (byte, int, char, etc.);
     * here the value is fed into the hasher as bytes.
     */
    private Funnel<T> funnel;

    public BloomFilterHelper(Funnel<T> funnel, int expectedInsertions, double fpp) {
        Preconditions.checkArgument(funnel != null, "funnel must not be null");
        this.funnel = funnel;
        // compute the length of the bit array
        bitSize = optimalNumOfBits(expectedInsertions, fpp);
        // compute how many hash functions to apply
        numHashFunctions = optimalNumOfHashFunctions(expectedInsertions, bitSize);
    }

    /**
     * Derives numHashFunctions bit offsets for a value via double hashing
     * over the two halves of a 128-bit murmur3 hash.
     */
    public int[] murmurHashOffset(T value) {
        int[] offset = new int[numHashFunctions];
        long hash64 = Hashing.murmur3_128().hashObject(value, funnel).asLong();
        int hash1 = (int) hash64;
        int hash2 = (int) (hash64 >>> 32);
        for (int i = 1; i <= numHashFunctions; i++) {
            int nextHash = hash1 + i * hash2;
            if (nextHash < 0) {
                nextHash = ~nextHash;
            }
            offset[i - 1] = nextHash % bitSize;
        }
        return offset;
    }

    /**
     * Computes the optimal number of hash functions: k = m / n * ln 2.
     * @param n expected insertions
     * @param m length of the bit array
     */
    private int optimalNumOfHashFunctions(long n, long m) {
        return Math.max(1, (int) Math.round((double) m / n * Math.log(2)));
    }

    /**
     * Computes the optimal bit array length: m = -n * ln(p) / (ln 2)^2.
     * @param n expected insertions
     * @param p desired false-positive probability
     */
    private int optimalNumOfBits(long n, double p) {
        if (p == 0) {
            // use the smallest representable probability to avoid log(0)
            p = Double.MIN_VALUE;
        }
        return (int) (-n * Math.log(p) / (Math.log(2) * Math.log(2)));
    }
}
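As a worked example with the values used by initBloomFilterHelper in step 2 (expectedInsertions = 1,000,000, fpp = 0.01): bitSize = -n * ln(p) / (ln 2)^2 ≈ 9,585,058 bits, i.e. roughly 1.2 MB of Redis bitmap per filter key, and numHashFunctions = round(bitSize / n * ln 2) ≈ 7, so every value costs seven SETBIT/GETBIT operations.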
package com.my.equipment.config.BloomFilter;
import com.google.common.base.Preconditions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
@Service
public class RedisBloomFilter {

    @Autowired
    private RedisTemplate redisTemplate;

    // set the bit at every offset produced for the value
    public <T> void addByBloomFilter(BloomFilterHelper<T> bloomFilterHelper, String key, T value) {
        Preconditions.checkArgument(bloomFilterHelper != null, "bloomFilterHelper must not be null");
        int[] offset = bloomFilterHelper.murmurHashOffset(value);
        for (int i : offset) {
            redisTemplate.opsForValue().setBit(key, i, true);
        }
    }

    // the value can exist only if every corresponding bit is set
    public <T> boolean includeByBloomFilter(BloomFilterHelper<T> bloomFilterHelper, String key, T value) {
        Preconditions.checkArgument(bloomFilterHelper != null, "bloomFilterHelper must not be null");
        int[] offset = bloomFilterHelper.murmurHashOffset(value);
        for (int i : offset) {
            if (!redisTemplate.opsForValue().getBit(key, i)) {
                return false;
            }
        }
        return true;
    }
}
4. Usage example
package com.my.equipment.utils.RedisUsing;
import com.my.equipment.config.BloomFilter.BloomFilterHelper;
import com.my.equipment.config.BloomFilter.RedisBloomFilter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class BloomFilterUtils {

    @Autowired
    private RedisBloomFilter redisBloomFilter;
    @Autowired
    private BloomFilterHelper bloomFilterHelper;

    public <T> boolean addBloomFilter(String key, T value) {
        try {
            redisBloomFilter.addByBloomFilter(bloomFilterHelper, "bloom:" + key, value);
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    public <T> boolean checkBloomFilter(String key, T value) {
        return redisBloomFilter.includeByBloomFilter(bloomFilterHelper, "bloom:" + key, value);
    }
}
- 4.1 Store all database records in the bloom filter
@Override
public void save(SimData simData) {
    simDataMapper.insertSelective(simData);
    int autoId = simData.getId();
    // register the newly generated primary key in the bloom filter
    bloomFilterUtils.addBloomFilter(String.valueOf(autoId), autoId);
}

@Override
@Transactional
// @Cacheable(key = "#id")
public SlaveSimData getSlaveData(int id) throws BloomException {
    boolean hasKey = bloomFilterUtils.checkBloomFilter(String.valueOf(id), id);
    if (!hasKey) {
        // if the bloom filter does not contain the id, the record cannot exist in the database; fail fast
        throw new BloomException();
    } else {
        SlaveSimData slaveSimData = null;
        if (redisTemplate.hasKey(id + "")) {
            slaveSimData = (SlaveSimData) redisTemplate.opsForValue().get(id + "");
        } else {
            // note: selectByPrimaryKey can still return null on a bloom-filter false positive,
            // so a null check before caching would avoid an exception from set()
            slaveSimData = slaveSimDataMapper.selectByPrimaryKey(id);
            redisTemplate.opsForValue().set(id + "", slaveSimData);
            System.out.println("created cache entry for " + id);
        }
        return slaveSimData;
    }
}
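The 4.1 heading talks about putting every existing database record into the bloom filter, while the code above only registers rows as they are saved. A minimal warm-up sketch for data that is already in the table, assuming a mapper method selectAllIds() that returns every primary key (that method is hypothetical and not part of the original code):
package com.my.equipment.utils.RedisUsing;
import com.my.equipment.web.seSlaveDao.SlaveSimDataMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Component
public class BloomFilterWarmUp implements CommandLineRunner {

    @Autowired
    private BloomFilterUtils bloomFilterUtils;
    @Autowired
    private SlaveSimDataMapper slaveSimDataMapper;

    @Override
    public void run(String... args) {
        // selectAllIds() is a hypothetical mapper method returning every primary key in the table
        for (Integer id : slaveSimDataMapper.selectAllIds()) {
            bloomFilterUtils.addBloomFilter(String.valueOf(id), id);
        }
    }
}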
Supplement
Cache breakdown (hot key) problem
package com.my.equipment.utils.RedisUsing;
import com.my.equipment.web.seSlaveDao.SlaveSimDataMapper;
import com.my.equipment.web.seSlavePojo.SlaveSimData;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
@Component
public class RedisTemplateUtil {

    private final RedisTemplate redisTemplate;

    @Autowired
    private SlaveSimDataMapper slaveSimDataMapper;

    public RedisTemplateUtil(RedisTemplate redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    public Object getHotkey(String key) {
        Object value = redisTemplate.opsForValue().get(key);
        if (value == null) {
            // a single global mutex; a per-key mutex such as "lock:" + key would avoid serializing unrelated keys
            String key_mutex = "lock";
            // the 180 s expiry releases the lock even if the holder crashes before deleting it
            if (redisTemplate.opsForValue().setIfAbsent(key_mutex, "1", 180, TimeUnit.SECONDS)) {
                System.out.println("acquired distributed lock");
                value = slaveSimDataMapper.selectByPrimaryKey(Integer.valueOf(key));
                // cache the reloaded value for an hour
                redisTemplate.opsForValue().set(key, value, 3600, TimeUnit.SECONDS);
                redisTemplate.delete(key_mutex);
            } else {
                try {
                    // other threads back off briefly, then retry until the lock holder has repopulated the cache
                    Thread.sleep(50);
                    System.out.println("another thread is waiting");
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                value = getHotkey(key);
            }
        }
        return value;
    }
}
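A hedged sketch of how this mutex-guarded loader could replace the plain cache read in getSlaveData from 4.1; it assumes RedisTemplateUtil is injected into that service (a hypothetical call site, not shown in the original):
// Inside the service from 4.1, after the bloom-filter check has passed:
SlaveSimData slaveSimData = (SlaveSimData) redisTemplateUtil.getHotkey(String.valueOf(id));
return slaveSimData;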