Two implementation approaches

1. Spring Boot parameter interception

Extend AbstractNamedValueMethodArgumentResolver (a minimal sketch follows this list).

2. Spring Boot AOP: every submitted String parameter is checked; the replacement rules were adapted from examples found via a Baidu search.

  Implemented with @Around (the SensitiveWord aspect and the WordFilter helper below).
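A minimal sketch of approach 1, for reference only: it assumes a hypothetical @Filtered parameter annotation and reuses the WordFilter bean defined further below; class names and the package are illustrative, not part of the original project.

package com.bysk.admin.common.filter;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.core.MethodParameter;
import org.springframework.web.context.request.NativeWebRequest;
import org.springframework.web.method.annotation.AbstractNamedValueMethodArgumentResolver;

/** Hypothetical marker annotation: String parameters carrying it go through the word filter. */
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
@interface Filtered {
    /** request-parameter name; empty means "use the declared parameter name" */
    String value() default "";
}

public class SensitiveWordArgumentResolver extends AbstractNamedValueMethodArgumentResolver {

    private final WordFilter wordFilter;

    public SensitiveWordArgumentResolver(WordFilter wordFilter) {
        this.wordFilter = wordFilter;
    }

    @Override
    public boolean supportsParameter(MethodParameter parameter) {
        // only plain String parameters explicitly marked with @Filtered
        return String.class == parameter.getParameterType()
                && parameter.hasParameterAnnotation(Filtered.class);
    }

    @Override
    protected NamedValueInfo createNamedValueInfo(MethodParameter parameter) {
        Filtered ann = parameter.getParameterAnnotation(Filtered.class);
        // an empty name makes the base class fall back to the declared parameter name
        return new NamedValueInfo(ann != null ? ann.value() : "", false, null);
    }

    @Override
    protected Object resolveName(String name, MethodParameter parameter, NativeWebRequest request) {
        String raw = request.getParameter(name);
        // run the raw request parameter through the same WordFilter used by the AOP variant
        return raw == null ? null : wordFilter.replaceWords(raw);
    }
}

The resolver still has to be registered through WebMvcConfigurer#addArgumentResolvers. Unlike the AOP variant below, it only covers individual String request parameters, not fields nested inside request bodies.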

package com.bysk.admin.common.filter;

import lombok.SneakyThrows;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;

import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

/**
 * @author: zhangyb
 * @date: 2020/11/19 9:21
 * @Description: Sensitive-word filtering. Only intercepts POST and PUT handler methods
 *               (i.e. create and update requests). Reflecting over every request may hurt
 *               performance, so switching to a dedicated annotation is recommended later
 *               (see the sketch after this class).
 * @UpdateUser : zhangyb
 * @UpdateDate :2020/11/19 9:21
 * @UpdateRemark:
 */
@Aspect
@Component
public class SensitiveWord {
    @Autowired
    private WordFilter wordFilter;
    @Around("@annotation(org.springframework.web.bind.annotation.PostMapping) || @annotation(org.springframework.web.bind.annotation.PutMapping)")
    @SneakyThrows
    public Object around(ProceedingJoinPoint point) {
        ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
        if (attributes == null) {
            // not called within a web request: nothing to filter
            return point.proceed();
        }
        HttpServletRequest request = attributes.getRequest();
        // skip the endpoints that maintain the sensitive-word list themselves
        if (request.getRequestURI().contains("/word/sensitiveword")) {
            return point.proceed();
        }
        // walk every handler argument and filter its String fields via reflection
        for (Object arg : point.getArgs()) {
            if (arg == null) {
                continue;
            }
            // recursively match and replace sensitive words in all String fields
            foundString(arg.getClass(), arg);
        }
        return point.proceed();
    }

    /**
     * @author: zhangyb
     * @date: 2020/11/19 13:57
     * @Description: Recursively walk the object's fields and replace sensitive words in every String field.
     * @UpdateUser : zhangyb
     * @UpdateDate :2020/11/19 13:57
     * @UpdateRemark:
     */
    @SneakyThrows
    public void foundString(Class<?> clazz, Object arg) {
        Field[] declaredFields = clazz.getDeclaredFields();
        for (Field declaredField : declaredFields) {
            Class<?> type = declaredField.getType();
            int modifiers = declaredField.getModifiers();
            if (type == String.class && !Modifier.isFinal(modifiers) && !Modifier.isStatic(modifiers)) {
                // String field: run its value through the sensitive-word filter (skip final/static fields)
                declaredField.setAccessible(true);
                String value = (String) declaredField.get(arg);
                declaredField.set(arg, wordFilter.replaceWords(value));
            } else if (type.getPackage() != null && type.getPackage().getName().contains("com.bysk")) {
                // nested project object: locate its getter and recurse into it
                Method[] methods = clazz.getMethods();
                for (Method method : methods) {
                    if (method.getName().equalsIgnoreCase("get" + declaredField.getName())) {
                        Object nested = method.invoke(arg);
                        if (nested != null) {
                            this.foundString(type, nested);
                        }
                        break;
                    }
                }
            }
        }
    }
}
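As the class comment above suggests, a dedicated annotation would avoid reflecting over every POST/PUT request. A minimal sketch of that idea, assuming a hypothetical @SensitiveCheck annotation; only the pointcut expression changes.

package com.bysk.admin.common.filter;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/** Hypothetical marker: only handler methods annotated with this go through the word filter. */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SensitiveCheck {
}

In SensitiveWord the pointcut would then become @Around("@annotation(com.bysk.admin.common.filter.SensitiveCheck)") while the rest of the aspect stays unchanged. The WordFilter bean the aspect delegates to follows.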
package com.bysk.admin.common.filter;
import com.bysk.admin.modules.word.mapper.SensitiveWordMapper;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

import java.util.*;
/**
 * @program: 农事云
 * @description: Sensitive-word replacement based on a DFA word trie; the word list is cached in Redis.
 * @author: zhangyb
 * @date: 2020/11/19 11:13
 * @version: 1.0
 */
@Slf4j
@Component
public class WordFilter {
    /** Redis list key that stores the sensitive words */
    private final static String WORDS = "SensitiveWord";
    /** mask character used to replace a matched word */
    private final static String REPLACE_CHAR = "*";
    /** DFA trie: nested maps keyed by character, with an "isEnd" flag marking the last character of a word */
    private static HashMap sensitiveWordMap;
    /** minimum-match rule: stop at the first complete word */
    private static int minMatchType = 1;
    /** maximum-match rule: keep scanning for the longest word */
    private static int maxMatchType = 2;
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;
    @Autowired
    private SensitiveWordMapper sensitiveWordMapper;

    public String replaceWords(String text) {
        if (StringUtils.isBlank(text)) {
            return text;
        }
        // fetch the word list from the Redis cache first
        List<Object> words = redisTemplate.opsForList().range(WORDS, 0, -1);
        if (words == null || words.isEmpty()) {
            // cache miss: fall back to the database and refresh the cache
            words = new ArrayList<>(sensitiveWordMapper.listStr());
            if (!words.isEmpty()) {
                redisTemplate.opsForList().leftPushAll(WORDS, words);
            }
        }
        return WordFilter.replaceSensitiveWord(words, text, WordFilter.minMatchType);
    }

    /**
     * Replace the sensitive words found in the text with the mask character.
     *
     * @param data      sensitive-word list
     * @param txt       text to check
     * @param matchType match rule (1 = minimum match, 2 = maximum match)
     */
    private static String replaceSensitiveWord(List<Object> data, String txt, int matchType) {
        if (sensitiveWordMap == null) {
            // the DFA trie is built only once per JVM; it is not rebuilt when the word list changes
            addSensitiveWord(data);
        }
        String resultTxt = txt;
        // collect every sensitive word contained in the text
        List<String> matched = getSensitiveWord(txt, matchType);
        for (String word : matched) {
            // literal replacement (no regex); each matched word is replaced by a single mask character
            resultTxt = resultTxt.replace(word, REPLACE_CHAR);
        }
        return resultTxt;
    }


    /**
     * Build a DFA model from the sensitive-word list: nested HashMaps keyed by character,
     * with an "isEnd" flag on the node of each word's last character.
     * Note: adapted from code found online.
     */
    private static void addSensitiveWord(List<Object> datas) {
        sensitiveWordMap = new HashMap(datas.size());
        for (Object data : datas) {
            if (data == null) {
                continue;
            }
            Map now2 = sensitiveWordMap;
            String word = ((String) data).trim();
            for (int i = 0; i < word.length(); i++) {
                char keyWord = word.charAt(i);
                Object obj = now2.get(keyWord);
                if (obj != null) {
                    // the character already exists at this level: descend into its node
                    now2 = (Map) obj;
                } else {
                    // new character: create a child node and descend
                    Map<String, Object> now = new HashMap<String, Object>();
                    now.put("isEnd", "0");
                    now2.put(keyWord, now);
                    now2 = now;
                }
                if (i == word.length() - 1) {
                    // mark the end of a complete word
                    now2.put("isEnd", "1");
                }
            }
        }
    }

    /**
     * Find the sensitive words contained in the text.
     * Note: adapted from code found online.
     *
     * @param text      text to scan
     * @param matchType match rule: 1 = minimum (shortest) match, 2 = maximum (longest) match
     * @return list of matched sensitive words
     */
    private static List<String> getSensitiveWord(String text, int matchType) {
        List<String> words = new ArrayList<String>();
        int i = 0;
        while (i < text.length()) {
            Map now = sensitiveWordMap;
            int matchLength = 0; // length of the best match starting at position i
            int walked = 0;      // how many characters have been walked from position i
            for (int j = i; j < text.length(); j++) {
                Object next = now.get(text.charAt(j));
                if (next == null) {
                    break; // the trie has no continuation for this character
                }
                now = (Map) next;
                walked++;
                if ("1".equals(now.get("isEnd"))) {
                    matchLength = walked;
                    if (matchType == minMatchType) {
                        break; // minimum match: stop at the first complete word
                    }
                    // maximum match: keep walking to look for a longer word
                }
            }
            if (matchLength > 0) {
                words.add(text.substring(i, i + matchLength));
                i += matchLength; // continue scanning after the matched word
            } else {
                i++;
            }
        }
        return words;
    }
    public WordFilter() {
        super();
    }
}
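A hedged usage sketch of WordFilter; CommentService, its package, and the word "badword" are illustrative assumptions only, and the word would have to exist in the SensitiveWord Redis list or in the table behind sensitiveWordMapper.listStr().

package com.bysk.admin.modules.comment;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.bysk.admin.common.filter.WordFilter;

@Service
public class CommentService {

    @Autowired
    private WordFilter wordFilter;

    public String clean(String content) {
        // assuming "badword" is in the word list, "this contains badword here"
        // comes back as "this contains * here" (the whole word is masked once)
        return wordFilter.replaceWords(content);
    }
}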