DFA-Based Sensitive Word Filtering
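The two classes below implement the filter: SensitiveWordLoader reads the word list from a database table, and SensitiveWordFilter builds a DFA-style trie out of nested HashMaps, in which every character of a sensitive word is a key and an isEnd marker flags where a word terminates. As a rough illustration (the words "abc" and "abd" are placeholders, not entries from any real word list), after addSensitiveWordToHashMap runs, the nested maps look roughly like this:

'a' -> { isEnd: "0",
  'b' -> { isEnd: "0",
    'c' -> { isEnd: "1" },
    'd' -> { isEnd: "1" } } }

checkSensitiveWord then walks this structure character by character from each start position, so a lookup never scans more of the text than the longest matching word.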
SensitiveWordFilter
package com.xxx.sensitiveword;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
@Slf4j
@Component
@SuppressWarnings("all")
public class SensitiveWordFilter {
@Resource
private SensitiveWordLoader sensitiveWordLoader;
// Root node of the word trie (nested maps forming the DFA)
private Map<Object, Object> rootNode = new HashMap<>();
/**
* Minimum match rule: stop at the first (shortest) match
*/
public static final int minMatchType = 1;
/**
* Maximum match rule: keep going to find the longest match
*/
public static final int maxMatchType = 2;
public static final String END_KEY = "isEnd";
public static final String IS_END = "1";
public static final String NOT_END_ = "0";
@PostConstruct
public void init() throws Exception {
reload();
}
public void reload() {
CompletableFuture.runAsync(this::load);
}
private void load() {
// Read all sensitive words
log.debug(" load >> sensitiveWordLoader.readAllSensitiveWord()");
Set<String> wordSet = sensitiveWordLoader.readAllSensitiveWord();
log.debug(" addSensitiveWordToHashMap >> building the DFA model from the sensitive words ");
addSensitiveWordToHashMap(wordSet);
log.debug(" load << sensitive words loaded");
}
/**
* Extract the sensitive words contained in a piece of text
*
* @param txt the text to scan
* @param matchType match rule: 1 = minimum match, 2 = maximum match
* @return Set of matched sensitive words
*/
public Set<String> getSensitiveWord(String txt, int matchType) {
log.debug("getSensitiveWord >> txt:{}, matchType:{}", txt, matchType);
Set<String> sensitiveWordSet = new HashSet<>();
for (int i = 0; i < txt.length(); i++) {
// Check whether a sensitive word starts at this position
int length = checkSensitiveWord(txt, i, matchType);
if (length > 0) { // Found one, add it to the result set
sensitiveWordSet.add(txt.substring(i, i + length));
i = i + length - 1; // Minus 1 because the for loop will increment i
}
}
log.debug("getSensitiveWord << resp:{} ", sensitiveWordSet);
return sensitiveWordSet;
}
/**
* Replace sensitive words in the text with a mask character
*
* @param txt the text to check
* @param matchType match rule: 1 = minimum match, 2 = maximum match
* @param replaceChar replacement character, typically *
* @return the masked text
*/
public String replaceSensitiveWord(String txt, int matchType, String replaceChar) {
log.debug("replaceSensitiveWord >> txt:{}, matchType:{}, replaceChar:{}", txt, matchType, replaceChar);
String resultTxt = txt;
// Collect all sensitive words found in the text
Set<String> set = getSensitiveWord(txt, matchType);
log.debug("replaceSensitiveWord >> getSensitiveWord:{} ", set);
for (String word : set) {
String replaceString = getReplaceChars(replaceChar, word.length());
// Use replace instead of replaceAll so regex metacharacters in the word are treated literally
resultTxt = resultTxt.replace(word, replaceString);
}
log.debug("replaceSensitiveWord >> resultTxt:{} ", resultTxt);
return resultTxt;
}
/**
* Build the replacement string (the mask character repeated)
*
* @param replaceChar mask character
* @param length number of characters to mask
* @return String
*/
private String getReplaceChars(String replaceChar, int length) {
log.debug("getReplaceChars >> replaceChar:{}, length:{} ", replaceChar, length);
StringBuilder resultReplace = new StringBuilder(replaceChar);
for (int i = 1; i < length; i++) {
resultReplace.append(replaceChar);
}
log.debug("getReplaceChars << resp:{} ", resultReplace);
return resultReplace.toString();
}
public int checkSensitiveWord(String txt) {
return checkSensitiveWord(txt, 0, minMatchType);
}
/**
* Check whether a sensitive word starts at the given position in the text
*
* @param txt the text to check
* @param beginIndex start position
* @param matchType match rule: 1 = minimum match, 2 = maximum match
* @return the length of the matched sensitive word, or 0 if there is no match
*/
public int checkSensitiveWord(String txt, int beginIndex, int matchType) {
log.debug("checkSensitiveWord >> txt:{}, beginIndex:{}, matchType:{} ", txt, beginIndex, matchType);
boolean flag = false; // Whether the matched prefix ended on a terminal node of the trie
int matchFlag = 0; // Number of matched characters, defaults to 0
Map<Object, Object> nowMap = rootNode;
for (int i = beginIndex; i < txt.length(); i++) {
char word = txt.charAt(i);
// Walk down the trie using the current character as the key
nowMap = (Map<Object, Object>) nowMap.get(word);
if (nowMap != null) { // The character exists in the trie, check whether it ends a word
// Found a matching key, increase the match count
matchFlag++;
// If this node ends a sensitive word, we have a match
if (IS_END.equals(nowMap.get(END_KEY))) {
// Mark that a complete word was matched
flag = true;
// Minimum match: return immediately; maximum match: keep looking for a longer word
if (minMatchType == matchType) {
break;
}
}
} else { // No such character in the trie, stop
break;
}
}
if (matchFlag < 2 || !flag) { // A match must end on a terminal node and be at least 2 characters long (single-character words are not reported)
matchFlag = 0;
}
log.debug("checkSensitiveWord << matchFlag:{} ", matchFlag);
return matchFlag;
}
/**
* Build the DFA model: insert every sensitive word into the nested-map trie
*
* @param words all sensitive words
*/
private void addSensitiveWordToHashMap(Set<String> words) {
// Pre-size the root container to reduce rehashing
rootNode = new HashMap<>(words.size());
Map<String, String> newWordMap;
for (String word : words) {
Map nowMap = rootNode;
for (int i = 0; i < word.length(); i++) {
// Take the current character
char keyChar = word.charAt(i);
// 65279 is the byte order mark (U+FEFF, zero-width no-break space); skip it
if ((int) keyChar == 65279) {
continue;
}
// Look up the child node for this character
Object wordMap = nowMap.get(keyChar);
if (wordMap != null) {
// The key already exists, descend into it
nowMap = (Map) wordMap;
} else {
// The key does not exist: create a new node and mark it as not the end of a word
newWordMap = new HashMap<>();
// Not the last character
newWordMap.put(END_KEY, NOT_END_);
nowMap.put(keyChar, newWordMap);
nowMap = newWordMap;
}
if (i == word.length() - 1) {
// Last character of the word: mark as terminal
nowMap.put(END_KEY, IS_END);
}
}
}
}
}
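A minimal usage sketch, assuming both beans are wired into the Spring context and the word list already contains the terms being tested; CommentService, its sanitize method, and the package line are illustrative names, not part of the original post:

package com.xxx.sensitiveword;

import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.Set;

@Service
public class CommentService {

    @Resource
    private SensitiveWordFilter sensitiveWordFilter;

    public String sanitize(String content) {
        // Find every sensitive word using the maximum-match rule
        Set<String> hits = sensitiveWordFilter.getSensitiveWord(content, SensitiveWordFilter.maxMatchType);
        if (hits.isEmpty()) {
            return content;
        }
        // Mask each hit with '*' of the same length
        return sensitiveWordFilter.replaceSensitiveWord(content, SensitiveWordFilter.maxMatchType, "*");
    }
}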
SensitiveWordLoader
package com.xxx.sensitiveword;
import com.rhy.aibox.base.domain.po.SensitiveWord;
import com.rhy.aibox.base.service.IBaseSensitiveWordService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.Date;
import java.util.Set;
import java.util.stream.Collectors;
/**
* @author yanggj
* @version 1.0.0
* @date 2023/7/13 16:08
*/
@Slf4j
@Component
public class SensitiveWordLoader {
@Resource
private IBaseSensitiveWordService baseSensitiveWordService;
/**
* Read all entries from the sensitive-word store and collect them into a Set
*
* @return Set of sensitive words
*/
public Set<String> readAllSensitiveWord() {
log.debug("readAllSensitiveWord >> loading all sensitive words");
Date date = new Date();
return baseSensitiveWordService.list()
.parallelStream()
// Enabled and not yet expired
.filter(item -> item.getExpireTime().after(date) && item.getDatastatusid().equals(1))
.map(SensitiveWord::getWord)
.collect(Collectors.toSet());
}
}
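SensitiveWordLoader depends on a database-backed IBaseSensitiveWordService. To try the filter without that service, a loader with the same readAllSensitiveWord() contract could read one word per line from a classpath file instead; the class name FileSensitiveWordLoader and the file name sensitive-words.txt below are assumptions, not part of the original project:

package com.xxx.sensitiveword;

import org.springframework.stereotype.Component;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

@Component
public class FileSensitiveWordLoader {

    /**
     * Read one sensitive word per line from a classpath resource.
     */
    public Set<String> readAllSensitiveWord() {
        Set<String> words = new HashSet<>();
        try (InputStream in = getClass().getClassLoader().getResourceAsStream("sensitive-words.txt")) {
            if (in == null) {
                return words;
            }
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    String word = line.trim();
                    if (!word.isEmpty()) {
                        words.add(word);
                    }
                }
            }
        } catch (Exception e) {
            // Return whatever was read; a real loader should log the failure
        }
        return words;
    }
}

You would then inject this loader (or adapt SensitiveWordFilter accordingly) in place of the database-backed one.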
This post is licensed under CC BY 4.0 by the author.