package test.config;

import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.jsontype.impl.LaissezFaireSubTypeValidator;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericToStringSerializer;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;

/**
 * Redis cache configuration: String keys, JSON values, a configurable default
 * TTL and optional per-cache-name TTL overrides.
 */
@Data
@EnableCaching
@Configuration
@ConfigurationProperties(prefix = "spring.cache.redis")
public class RedisConfig {

    /** Default entry TTL in milliseconds; falls back to 12 hours when the property is absent. */
    @Value("${spring.cache.redis.time-to-live:43200000}")
    private Long redisKeyTtl;

    /** Per-cache-name TTL overrides in milliseconds, bound from spring.cache.redis.custom-ttl. */
    private Map<String, Long> customTtl;

    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Template with String-serialized keys and JSON-serialized values for both
     * plain and hash operations.
     *
     * @param factory the Redis connection factory
     * @return the configured template
     */
    @Primary
    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(factory);
        GenericToStringSerializer<String> keySerializer = new GenericToStringSerializer<>(String.class);
        Jackson2JsonRedisSerializer<Object> valueSerializer = jacksonSerializer();
        // keys (plain and hash) as strings
        template.setKeySerializer(keySerializer);
        template.setHashKeySerializer(keySerializer);
        // values (plain and hash) as JSON
        template.setValueSerializer(valueSerializer);
        template.setHashValueSerializer(valueSerializer);
        template.afterPropertiesSet();
        return template;
    }

    /**
     * Cache manager with String keys / JSON values, the default TTL, and any
     * per-cache-name TTL overrides.
     *
     * @return the cache manager
     */
    @Bean
    public CacheManager cacheManager() {
        return new RedisCacheManagerResolver(
                // non-locking writer whose clean() uses SCAN instead of KEYS
                new MyDefaultRedisCacheWriter(redisTemplate.getConnectionFactory()),
                // default entry configuration (default TTL)
                this.newRedisCacheConfiguration(redisKeyTtl),
                // per-cache-name configurations (custom TTLs)
                this.getRedisCacheConfigurationMap());
    }

    /** Builds one cache configuration per custom-TTL entry; empty map when none configured. */
    private Map<String, RedisCacheConfiguration> getRedisCacheConfigurationMap() {
        Map<String, RedisCacheConfiguration> configurationMap = new HashMap<>();
        if (!CollectionUtils.isEmpty(customTtl)) {
            customTtl.forEach((cacheName, ttl) -> configurationMap.put(cacheName, newRedisCacheConfiguration(ttl)));
        }
        return configurationMap;
    }

    /**
     * Creates a cache configuration: String keys, JSON values, null values not
     * cached, and the given TTL.
     *
     * @param redisKeyTtl entry time-to-live in milliseconds
     * @return the cache configuration
     */
    private static RedisCacheConfiguration newRedisCacheConfiguration(Long redisKeyTtl) {
        return RedisCacheConfiguration
                .defaultCacheConfig()
                .serializeKeysWith(RedisSerializationContext.SerializationPair.fromSerializer(stringSerializer()))
                .serializeValuesWith(RedisSerializationContext.SerializationPair.fromSerializer(jacksonSerializer()))
                // do not cache nulls
                .disableCachingNullValues()
                .entryTtl(Duration.ofMillis(redisKeyTtl));
    }

    /**
     * JSON serializer whose ObjectMapper is lenient on deserialization and
     * embeds {@code @class} type information so values round-trip to their
     * concrete types.
     *
     * @return the configured Jackson serializer
     */
    private static Jackson2JsonRedisSerializer<Object> jacksonSerializer() {
        Jackson2JsonRedisSerializer<Object> serializer = new Jackson2JsonRedisSerializer<>(Object.class);
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
        // do not fail on unknown properties when deserializing
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        // do not fail when serializing an empty bean
        objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
        // do not fail on invalid subtype information when deserializing
        objectMapper.configure(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, false);
        // BUGFIX: the original set WRITE_DATE_KEYS_AS_TIMESTAMPS, which only
        // affects Map keys; the stated intent (serialize dates as ISO strings,
        // not timestamps) requires WRITE_DATES_AS_TIMESTAMPS.
        objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
        // JSR-310 module for the java.time types
        objectMapper.registerModule(new JavaTimeModule());
        // embed @class type info as a property for polymorphic deserialization
        objectMapper.activateDefaultTyping(LaissezFaireSubTypeValidator.instance,
                ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY);
        serializer.setObjectMapper(objectMapper);
        return serializer;
    }

    /** String serializer used for cache keys. */
    private static GenericToStringSerializer<String> stringSerializer() {
        return new GenericToStringSerializer<>(String.class);
    }
}
package test.config;

import org.springframework.dao.PessimisticLockingFailureException;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.RedisStringCommands.SetOption;
import org.springframework.data.redis.core.Cursor;
import org.springframework.data.redis.core.ScanOptions;
import org.springframework.data.redis.core.types.Expiration;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;

/**
 * {@link RedisCacheWriter} that replaces the KEYS-based {@code clean()} of the
 * default writer with a SCAN-based implementation, so clearing a cache does
 * not block Redis when the keyspace is large.
 */
public class MyDefaultRedisCacheWriter implements RedisCacheWriter {

    private final RedisConnectionFactory connectionFactory;
    /** Poll interval while waiting on the cache lock; {@link Duration#ZERO} disables locking. */
    private final Duration sleepTime;

    MyDefaultRedisCacheWriter(RedisConnectionFactory connectionFactory) {
        this(connectionFactory, Duration.ZERO);
    }

    MyDefaultRedisCacheWriter(RedisConnectionFactory connectionFactory, Duration sleepTime) {
        Assert.notNull(connectionFactory, "ConnectionFactory must not be null!");
        Assert.notNull(sleepTime, "SleepTime must not be null!");
        this.connectionFactory = connectionFactory;
        this.sleepTime = sleepTime;
    }

    /** Stores {@code value} under {@code key}, with an expiry when a positive TTL is given. */
    @Override
    public void put(String name, byte[] key, byte[] value, @Nullable Duration ttl) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(key, "Key must not be null!");
        Assert.notNull(value, "Value must not be null!");
        this.execute(name, (connection) -> {
            if (shouldExpireWithin(ttl)) {
                connection.set(key, value, Expiration.from(ttl.toMillis(), TimeUnit.MILLISECONDS), SetOption.upsert());
            } else {
                connection.set(key, value);
            }
            return "OK";
        });
    }

    /** Returns the value stored under {@code key}, or {@code null} when absent. */
    @Override
    public byte[] get(String name, byte[] key) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(key, "Key must not be null!");
        return this.execute(name, (connection) -> connection.get(key));
    }

    /**
     * Stores {@code value} only when {@code key} is absent.
     *
     * @return {@code null} when the value was stored, otherwise the existing value
     */
    @Override
    public byte[] putIfAbsent(String name, byte[] key, byte[] value, @Nullable Duration ttl) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(key, "Key must not be null!");
        Assert.notNull(value, "Value must not be null!");
        return this.execute(name, (connection) -> {
            if (this.isLockingCacheWriter()) {
                this.doLock(name, connection);
            }
            try {
                boolean stored;
                if (shouldExpireWithin(ttl)) {
                    stored = connection.set(key, value, Expiration.from(ttl), SetOption.ifAbsent());
                } else {
                    stored = connection.setNX(key, value);
                }
                // null signals "value was stored"; otherwise hand back the current value
                return stored ? null : connection.get(key);
            } finally {
                if (this.isLockingCacheWriter()) {
                    this.doUnlock(name, connection);
                }
            }
        });
    }

    /** Deletes the entry stored under {@code key}. */
    @Override
    public void remove(String name, byte[] key) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(key, "Key must not be null!");
        this.execute(name, (connection) -> connection.del(new byte[][]{key}));
    }

    /**
     * Removes all keys matching {@code pattern}. Overridden because the stock
     * implementation uses KEYS, which blocks Redis when the keyspace is large;
     * this version collects the keys with SCAN instead.
     *
     * @param name    cache name
     * @param pattern key pattern to delete
     */
    @Override
    public void clean(String name, byte[] pattern) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(pattern, "Pattern must not be null!");
        this.execute(name, (connection) -> {
            boolean wasLocked = false;
            try {
                if (this.isLockingCacheWriter()) {
                    this.doLock(name, connection);
                    wasLocked = true;
                }
                // SCAN instead of KEYS to avoid blocking the server
                byte[][] keys = scanKeys(pattern, connection);
                if (keys.length > 0) {
                    connection.del(keys);
                }
            } finally {
                if (wasLocked && this.isLockingCacheWriter()) {
                    this.doUnlock(name, connection);
                }
            }
            return "OK";
        });
    }

    /**
     * Collects all keys matching {@code pattern} via SCAN.
     *
     * @param pattern    key pattern
     * @param connection active Redis connection
     * @return the matching keys
     */
    private byte[][] scanKeys(byte[] pattern, RedisConnection connection) {
        Set<byte[]> keys = new HashSet<>();
        // BUGFIX: the cursor is a connection-backed resource and was never
        // closed (leak on every clean); also decode the pattern with an
        // explicit charset instead of the platform default.
        try (Cursor<byte[]> cursor = connection.scan(new ScanOptions.ScanOptionsBuilder()
                .match(new String(pattern, StandardCharsets.UTF_8)).count(1000).build())) {
            while (cursor.hasNext()) {
                keys.add(cursor.next());
            }
        }
        return keys.toArray(new byte[0][]);
    }

    /** Acquires the cache lock for {@code name} (best effort, SETNX). */
    void lock(String name) {
        this.execute(name, (connection) -> this.doLock(name, connection));
    }

    /** Releases the cache lock for {@code name}. */
    void unlock(String name) {
        this.executeLockFree((connection) -> this.doUnlock(name, connection));
    }

    private Boolean doLock(String name, RedisConnection connection) {
        return connection.setNX(createCacheLockKey(name), new byte[0]);
    }

    private Long doUnlock(String name, RedisConnection connection) {
        return connection.del(new byte[][]{createCacheLockKey(name)});
    }

    boolean doCheckLock(String name, RedisConnection connection) {
        return connection.exists(createCacheLockKey(name));
    }

    /** Locking is enabled only for a strictly positive sleep time. */
    private boolean isLockingCacheWriter() {
        return !this.sleepTime.isZero() && !this.sleepTime.isNegative();
    }

    /** Runs {@code callback} on a fresh connection, waiting for the cache lock first. */
    private <T> T execute(String name, Function<RedisConnection, T> callback) {
        RedisConnection connection = this.connectionFactory.getConnection();
        try {
            this.checkAndPotentiallyWaitUntilUnlocked(name, connection);
            return callback.apply(connection);
        } finally {
            connection.close();
        }
    }

    /** Runs {@code callback} on a fresh connection without honoring the cache lock. */
    private void executeLockFree(Consumer<RedisConnection> callback) {
        RedisConnection connection = this.connectionFactory.getConnection();
        try {
            callback.accept(connection);
        } finally {
            connection.close();
        }
    }

    /** Busy-waits (with sleep) until the cache lock for {@code name} is released. */
    private void checkAndPotentiallyWaitUntilUnlocked(String name, RedisConnection connection) {
        if (this.isLockingCacheWriter()) {
            try {
                while (this.doCheckLock(name, connection)) {
                    Thread.sleep(this.sleepTime.toMillis());
                }
            } catch (InterruptedException ex) {
                // restore the interrupt flag and surface the failure
                Thread.currentThread().interrupt();
                throw new PessimisticLockingFailureException(
                        String.format("Interrupted while waiting to unlock cache %s", name), ex);
            }
        }
    }

    private static boolean shouldExpireWithin(@Nullable Duration ttl) {
        return ttl != null && !ttl.isZero() && !ttl.isNegative();
    }

    private static byte[] createCacheLockKey(String name) {
        return (name + "~lock").getBytes(StandardCharsets.UTF_8);
    }
}
package test.config;

import org.springframework.data.redis.cache.RedisCache;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheManager;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.lang.Nullable;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * {@link RedisCacheManager} that creates {@link RedisCacheResolver} instances
 * instead of plain {@link RedisCache}, enabling pattern-based eviction.
 */
public class RedisCacheManagerResolver extends RedisCacheManager {

    private final RedisCacheWriter cacheWriter;
    private final RedisCacheConfiguration defaultCacheConfig;

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration) {
        super(cacheWriter, defaultCacheConfiguration);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration,
                                     String... initialCacheNames) {
        super(cacheWriter, defaultCacheConfiguration, initialCacheNames);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration,
                                     boolean allowInFlightCacheCreation,
                                     String... initialCacheNames) {
        super(cacheWriter, defaultCacheConfiguration, allowInFlightCacheCreation, initialCacheNames);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration,
                                     Map<String, RedisCacheConfiguration> initialCacheConfigurations) {
        super(cacheWriter, defaultCacheConfiguration, initialCacheConfigurations);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration,
                                     Map<String, RedisCacheConfiguration> initialCacheConfigurations,
                                     boolean allowInFlightCacheCreation) {
        super(cacheWriter, defaultCacheConfiguration, initialCacheConfigurations, allowInFlightCacheCreation);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisConnectionFactory redisConnectionFactory,
                                     RedisCacheConfiguration cacheConfiguration) {
        this(RedisCacheWriter.nonLockingRedisCacheWriter(redisConnectionFactory), cacheConfiguration);
    }

    /**
     * Overrides cache creation so every cache is a {@link RedisCacheResolver}.
     *
     * @param name        cache name
     * @param cacheConfig configuration for this cache; the default config is used when null
     * @return the created cache
     */
    @Override
    protected RedisCache createRedisCache(String name, @Nullable RedisCacheConfiguration cacheConfig) {
        RedisCacheConfiguration effectiveConfig = cacheConfig != null ? cacheConfig : defaultCacheConfig;
        return new RedisCacheResolver(name, cacheWriter, effectiveConfig);
    }

    /** Returns an unmodifiable snapshot of each known cache's configuration. */
    @Override
    public Map<String, RedisCacheConfiguration> getCacheConfigurations() {
        Map<String, RedisCacheConfiguration> configurations = new HashMap<>(getCacheNames().size());
        for (String cacheName : getCacheNames()) {
            RedisCache cache = RedisCacheResolver.class.cast(lookupCache(cacheName));
            configurations.put(cacheName, cache == null ? null : cache.getCacheConfiguration());
        }
        return Collections.unmodifiableMap(configurations);
    }
}
package test.config; import org.springframework.core.convert.ConversionService; import org.springframework.data.redis.cache.RedisCache; import org.springframework.data.redis.cache.RedisCacheConfiguration; import org.springframework.data.redis.cache.RedisCacheWriter; import org.springframework.util.StringUtils; public class RedisCacheResolver extends RedisCache { private final String name; private final RedisCacheWriter cacheWriter; private final ConversionService conversionService; protected RedisCacheResolver(String name, RedisCacheWriter cacheWriter, RedisCacheConfiguration cacheConfig) { super(name, cacheWriter, cacheConfig); this.name = name; this.cacheWriter = cacheWriter; this.conversionService = cacheConfig.getConversionService(); } /** * * @Title: evict * @Description: 重写删除的方法,支持按key的后缀进行模糊删除 * @param @param key * @throws * */ @Override public void evict(Object key) { if (key instanceof String) { String keyString = key.toString(); // 后缀删除 if (StringUtils.endsWithIgnoreCase(keyString, "*")) { evictLikeSuffix(keyString); return; } } // 删除指定的key super.evict(key); } /** * 后缀匹配匹配 * * @param key */ private void evictLikeSuffix(String key) { byte[] pattern = this.conversionService.convert(this.createCacheKey(key), byte[].class); this.cacheWriter.clean(this.name, pattern); } }
鉴于原生SpringCache存在以下问题,因此对SpringCache进行扩展 原生存在问题: 1.无法根据具体的cacheName自定义ttl 2.删除无法使用key进行精细的模糊删除 3.删除采用keys,在大数据量的情况下会导致redis阻塞 改进如下: 1.实现SpringCache中自定义cacheName的ttl 2.实现SpringCache中删除缓存时支持key的模糊删除 3.优化SpringCache中查找keys的方法为scan