Add search module: top-ten hot searches, personal search history, adding and deleting search records, sensitive-word replacement; Redis configuration not added yet

This commit is contained in:
cyk 2023-12-19 21:35:57 +08:00
parent 0721107407
commit 9773f42df7
11 changed files with 1612 additions and 1 deletion

@@ -0,0 +1,33 @@
package com.lovenav.configuration;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.JdkSerializationRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;

@Configuration
public class RedisConfig {

    @Autowired
    RedisConnectionFactory redisConnectionFactory;

    @Bean
    public RedisTemplate<String, Object> functionDomainRedisTemplate() {
        RedisTemplate<String, Object> redisTemplate = new RedisTemplate<>();
        initDomainRedisTemplate(redisTemplate, redisConnectionFactory);
        return redisTemplate;
    }

    /*
     * Configure the RedisTemplate serialization strategy; otherwise keys and values
     * show up as garbled bytes when inspected from a Redis client.
     */
    private void initDomainRedisTemplate(RedisTemplate<String, Object> redisTemplate, RedisConnectionFactory factory) {
        redisTemplate.setKeySerializer(new StringRedisSerializer());
        redisTemplate.setHashKeySerializer(new StringRedisSerializer());
        redisTemplate.setHashValueSerializer(new JdkSerializationRedisSerializer());
        redisTemplate.setValueSerializer(new JdkSerializationRedisSerializer());
        redisTemplate.setConnectionFactory(factory);
    }
}
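The commit message notes that the Redis connection settings are not added yet. A minimal sketch of what they might look like in application.properties, assuming Spring Boot 2.x (the javax.annotation.Resource import used later in this commit suggests 2.x) and a local unauthenticated Redis instance; all values are placeholders:

spring.redis.host=localhost
spring.redis.port=6379
spring.redis.database=0
# spring.redis.password= (set only if the Redis instance requires authentication)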

@@ -0,0 +1,89 @@
package com.lovenav.configuration;

import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

@Configuration
@SuppressWarnings({ "rawtypes", "unchecked" })
public class SensitiveWordInit {

    // Character encoding of the word list file
    private String ENCODING = "UTF-8";

    // Initialize the sensitive-word dictionary
    public Map initKeyWord() throws IOException {
        // Read the word list into a Set
        Set<String> wordSet = readSensitiveWordFile();
        // Build the DFA (deterministic finite automaton) as nested HashMaps
        return addSensitiveWordToHashMap(wordSet);
    }

    // Read the sensitive-word file into a Set
    private Set<String> readSensitiveWordFile() throws IOException {
        Set<String> wordSet = null;
        ClassPathResource classPathResource = new ClassPathResource("static/word.txt");
        InputStream inputStream = classPathResource.getInputStream();
        try {
            // Wrap the input stream with the configured encoding
            InputStreamReader read = new InputStreamReader(inputStream, ENCODING);
            wordSet = new HashSet<String>();
            // BufferedReader buffers characters to reduce the number of reads
            BufferedReader br = new BufferedReader(read);
            String txt = null;
            // One sensitive word per line
            while ((txt = br.readLine()) != null) {
                wordSet.add(txt);
            }
            br.close();
            // Close the underlying reader as well
            read.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return wordSet;
    }

    // Build the nested-map DFA from the word set
    private Map addSensitiveWordToHashMap(Set<String> wordSet) {
        // Pre-size the container to reduce rehashing
        Map wordMap = new HashMap(wordSet.size());
        for (String word : wordSet) {
            Map nowMap = wordMap;
            for (int i = 0; i < word.length(); i++) {
                // Current character of the word
                char keyChar = word.charAt(i);
                Object tempMap = nowMap.get(keyChar);
                // The character already exists in the trie: descend into its map
                if (tempMap != null) {
                    nowMap = (Map) tempMap;
                }
                // Otherwise create a new node with isEnd = 0 (not a word ending yet)
                else {
                    Map<String, String> newMap = new HashMap<String, String>();
                    newMap.put("isEnd", "0");
                    nowMap.put(keyChar, newMap);
                    nowMap = newMap;
                }
                // Last character of the word: mark this node as a word ending
                if (i == word.length() - 1) {
                    nowMap.put("isEnd", "1");
                }
            }
        }
        return wordMap;
    }
}
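For illustration (the entries are made up and only stand in for lines of static/word.txt), loading the two words "abc" and "ab" produces a nested map of roughly this shape, where the node for 'b' is both a word ending and an intermediate node:

{a={isEnd=0, b={isEnd=1, c={isEnd=1}}}}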

@@ -0,0 +1,7 @@
package com.lovenav.controller;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class AdminController {
}

@@ -0,0 +1,4 @@
package com.lovenav.controller;
public class SearchController {
}
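SearchController is added here only as an empty shell. A minimal sketch of how it might expose the features in this commit through the RedisService interface added below; the request mappings, endpoint paths, and parameter names are assumptions, not part of the commit:

package com.lovenav.controller;

import com.lovenav.service.RedisService;
import org.springframework.web.bind.annotation.*;

import javax.annotation.Resource;
import java.util.List;

@RestController
@RequestMapping("/search")
public class SearchController {

    @Resource
    private RedisService redisService;

    // Record one search: bump the keyword's hotness and store it in the user's history
    @PostMapping("/record")
    public Long record(@RequestParam String userId, @RequestParam String searchKey) {
        redisService.incrementScore(searchKey);
        return redisService.addSearchHistoryByUserId(userId, searchKey);
    }

    // Top hot searches, optionally filtered by a related keyword
    @GetMapping("/hot")
    public List<String> hot(@RequestParam(required = false) String searchKey) {
        return redisService.getHotList(searchKey);
    }

    // The user's personal search history
    @GetMapping("/history")
    public List<String> history(@RequestParam String userId) {
        return redisService.getSearchHistoryByUserId(userId);
    }

    // Delete one entry from the user's personal search history
    @DeleteMapping("/history")
    public Long deleteHistory(@RequestParam String userId, @RequestParam String searchKey) {
        return redisService.delSearchHistoryByUserId(userId, searchKey);
    }
}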

@@ -0,0 +1,166 @@
package com.lovenav.filter;

import com.lovenav.configuration.SensitiveWordInit;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

/**
 * @ClassName SensitiveFilter
 * Description: sensitive-word filter based on the DFA (nested-map trie) built by SensitiveWordInit
 */
@Component
public class SensitiveFilter {

    /**
     * DFA dictionary: nested maps keyed by character, with an "isEnd" flag on each node
     */
    private Map sensitiveWordMap = null;

    /**
     * Minimum-match rule: with the dictionary ["中国", "中国人"], the sentence "我是中国人" matches [中国]
     */
    public static final int minMatchType = 1;

    /**
     * Maximum-match rule: with the dictionary ["中国", "中国人"], the sentence "我是中国人" matches [中国人]
     */
    public static final int maxMatchType = 2;

    /**
     * Default replacement string for sensitive words
     */
    public static String placeHolder = "**";

    // Singleton instance
    private static SensitiveFilter instance = null;

    /**
     * Constructor: load the sensitive-word dictionary
     */
    private SensitiveFilter() throws IOException {
        sensitiveWordMap = new SensitiveWordInit().initKeyWord();
    }

    /**
     * Get the singleton instance
     */
    public static SensitiveFilter getInstance() throws IOException {
        if (null == instance) {
            instance = new SensitiveFilter();
        }
        return instance;
    }

    /**
     * Collect the sensitive words contained in a text
     */
    public Set<String> getSensitiveWord(String txt, int matchType) {
        Set<String> sensitiveWordList = new HashSet<>();
        for (int i = 0; i < txt.length(); i++) {
            // Length of the sensitive match starting at position i, or 0 if none
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0) {
                sensitiveWordList.add(txt.substring(i, i + length));
                // Skip past the matched word (minus 1 because the for loop increments i)
                i = i + length - 1;
            }
        }
        return sensitiveWordList;
    }

    /**
     * Replace sensitive words using the default placeholder and the minimum-match rule
     */
    public String replaceSensitiveWord(String txt) {
        return replaceSensitiveWord(txt, minMatchType, placeHolder);
    }

    /**
     * Replace sensitive words using the default placeholder
     */
    public String replaceSensitiveWord(String txt, int matchType) {
        return replaceSensitiveWord(txt, matchType, placeHolder);
    }

    /**
     * Replace sensitive words with the given replacement string
     */
    public String replaceSensitiveWord(String txt, int matchType, String replaceChar) {
        String resultTxt = txt;
        // Find every sensitive word in the text
        Set<String> set = getSensitiveWord(txt, matchType);
        Iterator<String> iterator = set.iterator();
        String word = null;
        String replaceString = null;
        while (iterator.hasNext()) {
            word = iterator.next();
            replaceString = getReplaceChars(replaceChar, word.length());
            // Use replace (literal) rather than replaceAll (regex) so special characters in the word are safe
            resultTxt = resultTxt.replace(word, replaceString);
        }
        return resultTxt;
    }

    /**
     * Build a replacement string of the given length
     */
    private String getReplaceChars(String replaceChar, int length) {
        StringBuilder resultReplace = new StringBuilder(replaceChar);
        for (int i = 1; i < length; i++) {
            resultReplace.append(replaceChar);
        }
        return resultReplace.toString();
    }

    /**
     * Check whether a sensitive word starts at beginIndex.
     * Returns the length of the sensitive match, or 0 if there is none.
     * This is the core DFA lookup.
     */
    public int checkSensitiveWord(String txt, int beginIndex, int matchType) {
        // Set to true once a complete dictionary word (isEnd == 1) has been reached
        boolean flag = false;
        // Number of characters matched so far
        int matchFlag = 0;
        Map nowMap = sensitiveWordMap;
        for (int i = beginIndex; i < txt.length(); i++) {
            char word = txt.charAt(i);
            // Descend one level in the trie
            nowMap = (Map) nowMap.get(word);
            if (nowMap != null) {
                // Character matched: extend the current match
                matchFlag++;
                // Reached the end of a dictionary word
                if ("1".equals(nowMap.get("isEnd"))) {
                    flag = true;
                    // Minimum-match rule returns immediately; maximum-match keeps extending
                    if (SensitiveFilter.minMatchType == matchType) {
                        break;
                    }
                }
            }
            // No further match possible
            else {
                break;
            }
        }
        // Discard matches shorter than 2 characters and partial matches that never reached a word end
        if (SensitiveFilter.maxMatchType == matchType || SensitiveFilter.minMatchType == matchType) {
            if (matchFlag < 2 || !flag) {
                matchFlag = 0;
            }
        }
        return matchFlag;
    }
}
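A minimal usage sketch (the demo class name and input string are made up, and the outcome depends on what static/word.txt actually contains):

package com.lovenav.filter;

import java.io.IOException;
import java.util.Set;

public class SensitiveFilterDemo {
    public static void main(String[] args) throws IOException {
        // getInstance() builds the DFA from static/word.txt on first use
        SensitiveFilter filter = SensitiveFilter.getInstance();
        String input = "some user-submitted search text";
        // Collect the dictionary words found in the input, preferring the longest match
        Set<String> hits = filter.getSensitiveWord(input, SensitiveFilter.maxMatchType);
        // Replace each match with the "**" placeholder repeated for its length
        String cleaned = filter.replaceSensitiveWord(input, SensitiveFilter.maxMatchType);
        System.out.println(hits + " -> " + cleaned);
    }
}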

@@ -1,4 +1,4 @@
 package com.lovenav.service;
-public class AdminService {
+public interface AdminService {
 }

@@ -0,0 +1,22 @@
package com.lovenav.service;

import java.util.List;

public interface RedisService {

    // Add one entry to the user's personal search history
    Long addSearchHistoryByUserId(String userId, String searchKey);

    // Delete one entry from the user's personal search history
    Long delSearchHistoryByUserId(String userId, String searchKey);

    // List the user's personal search history
    List<String> getSearchHistoryByUserId(String userId);

    // Record a new hot-search keyword entered by a user
    int incrementScoreByUserId(String searchKey);

    // Top ten hottest keywords related to searchKey (if searchKey is null or empty, return the overall top ten)
    List<String> getHotList(String searchKey);

    // Increase the hotness of searchKey by 1 on each click
    Long incrementScore(String searchKey);
}

@@ -0,0 +1,4 @@
package com.lovenav.service.serviceImpl;
public class AdminServiceImpl {
}

@@ -0,0 +1,198 @@
package com.lovenav.service.serviceImpl;

import com.lovenav.service.RedisService;
import com.lovenav.utils.RedisKeyUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.core.*;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Resource;
import java.util.*;
@Transactional
@Service("redisService")
public class RedisServiceImpl implements RedisService {

    private Logger logger = LoggerFactory.getLogger(RedisServiceImpl.class);

    /**
     * Size threshold for the hot-search list: results stop being added once the list exceeds it, so at most 10 entries are returned
     */
    private static final Integer HOT_SEARCH_NUMBER = 9;

    /**
     * Time window (in milliseconds) within which a search still counts as hot: 30 days
     */
    private static final Long HOT_SEARCH_TIME = 30 * 24 * 60 * 60 * 1000L;

    @Resource
    private StringRedisTemplate stringRedisTemplate;

    /**
     * Add one entry (searchKey) to the search history of the user identified by userId
     */
    public Long addSearchHistoryByUserId(String userId, String searchKey) {
        try {
            String redisKey = RedisKeyUtils.getSearchHistoryKey(userId);
            // Does the history hash for this user already exist?
            boolean b = Boolean.TRUE.equals(stringRedisTemplate.hasKey(redisKey));
            if (b) {
                // If the keyword is already recorded, do nothing; otherwise add it
                Object hk = stringRedisTemplate.opsForHash().get(redisKey, searchKey);
                if (hk != null) {
                    return 1L;
                } else {
                    stringRedisTemplate.opsForHash().put(redisKey, searchKey, "1");
                }
            } else {
                // No history yet for this user: create the hash with this keyword
                stringRedisTemplate.opsForHash().put(redisKey, searchKey, "1");
            }
            return 1L;
        } catch (Exception e) {
            logger.error("Redis operation failed", e);
            return 0L;
        }
    }
    /**
     * Delete one entry from the user's personal search history
     */
    public Long delSearchHistoryByUserId(String userId, String searchKey) {
        try {
            String redisKey = RedisKeyUtils.getSearchHistoryKey(userId);
            // Remove this keyword from the user's history hash
            return stringRedisTemplate.opsForHash().delete(redisKey, searchKey);
        } catch (Exception e) {
            logger.error("Redis operation failed", e);
            return 0L;
        }
    }

    /**
     * List the user's personal search history
     */
    public List<String> getSearchHistoryByUserId(String userId) {
        try {
            List<String> stringList = null;
            String redisKey = RedisKeyUtils.getSearchHistoryKey(userId);
            // Only scan if the history hash exists
            boolean b = Boolean.TRUE.equals(stringRedisTemplate.hasKey(redisKey));
            if (b) {
                stringList = new ArrayList<>();
                // Scan every field of the hash; ScanOptions.NONE returns all entries
                // (ScanOptions.scanOptions().match(...).build() would restrict the scan to matching fields)
                Cursor<Map.Entry<Object, Object>> cursor = stringRedisTemplate.opsForHash().scan(redisKey, ScanOptions.NONE);
                while (cursor.hasNext()) {
                    Map.Entry<Object, Object> map = cursor.next();
                    String key = map.getKey().toString();
                    stringList.add(key);
                }
                cursor.close();
                return stringList;
            }
            return null;
        } catch (Exception e) {
            logger.error("Redis operation failed", e);
            return null;
        }
    }
    /**
     * Top ten hottest keywords related to searchKey (if searchKey is null or empty, return the overall top ten)
     */
    public List<String> getHotList(String searchKey) {
        try {
            Long now = System.currentTimeMillis();
            List<String> result = new ArrayList<>();
            ZSetOperations<String, String> zSetOperations = stringRedisTemplate.opsForZSet();
            ValueOperations<String, String> valueOperations = stringRedisTemplate.opsForValue();
            // All keywords, hottest first
            Set<String> value = zSetOperations.reverseRangeByScore(RedisKeyUtils.getHotSearchKey(), 0, Double.MAX_VALUE);
            // With a non-empty searchKey, recommend the hottest related keywords
            if (StringUtils.isNotEmpty(searchKey)) {
                for (String val : value) {
                    if (StringUtils.containsIgnoreCase(val, searchKey)) {
                        // Return at most the top ten
                        if (result.size() > HOT_SEARCH_NUMBER) {
                            break;
                        }
                        Long time = Long.valueOf(Objects.requireNonNull(valueOperations.get(RedisKeyUtils.getSearchTimeKey(val))));
                        // Only keywords searched within the last month count
                        if ((now - time) < HOT_SEARCH_TIME) {
                            result.add(val);
                        } else {
                            // Not searched for over a month: reset its hotness to 0
                            zSetOperations.add(RedisKeyUtils.getHotSearchKey(), val, 0);
                        }
                    }
                }
            } else {
                for (String val : value) {
                    // Return at most the top ten
                    if (result.size() > HOT_SEARCH_NUMBER) {
                        break;
                    }
                    Long time = Long.valueOf(Objects.requireNonNull(valueOperations.get(RedisKeyUtils.getSearchTimeKey(val))));
                    // Only keywords searched within the last month count
                    if ((now - time) < HOT_SEARCH_TIME) {
                        result.add(val);
                    } else {
                        // Not searched for over a month: reset its hotness to 0
                        zSetOperations.add(RedisKeyUtils.getHotSearchKey(), val, 0);
                    }
                }
            }
            return result;
        } catch (Exception e) {
            logger.error("Redis operation failed", e);
            return null;
        }
    }
    /**
     * Record a new hot-search keyword entered by a user
     */
    public int incrementScoreByUserId(String searchKey) {
        Long now = System.currentTimeMillis();
        ZSetOperations<String, String> zSetOperations = stringRedisTemplate.opsForZSet();
        ValueOperations<String, String> valueOperations = stringRedisTemplate.opsForValue();
        try {
            // score() returns null when the keyword is not in the zset yet
            Double score = zSetOperations.score(RedisKeyUtils.getHotSearchKey(), searchKey);
            if (score == null || score <= 0) {
                zSetOperations.add(RedisKeyUtils.getHotSearchKey(), searchKey, 0);
                valueOperations.set(RedisKeyUtils.getSearchTimeKey(searchKey), String.valueOf(now));
            }
        } catch (Exception e) {
            zSetOperations.add(RedisKeyUtils.getHotSearchKey(), searchKey, 0);
            valueOperations.set(RedisKeyUtils.getSearchTimeKey(searchKey), String.valueOf(now));
        }
        return 1;
    }

    /**
     * Increase the hotness of searchKey by 1 on each click
     */
    public Long incrementScore(String searchKey) {
        try {
            Long now = System.currentTimeMillis();
            ZSetOperations<String, String> zSetOperations = stringRedisTemplate.opsForZSet();
            ValueOperations<String, String> valueOperations = stringRedisTemplate.opsForValue();
            // incrementScore inserts the member with score 1 if it is missing, otherwise adds 1 to its score
            zSetOperations.incrementScore(RedisKeyUtils.getHotSearchKey(), searchKey, 1);
            // Refresh the keyword's last-searched timestamp
            valueOperations.getAndSet(RedisKeyUtils.getSearchTimeKey(searchKey), String.valueOf(now));
            return 1L;
        } catch (Exception e) {
            logger.error("Redis operation failed", e);
            return 0L;
        }
    }
}

@@ -0,0 +1,23 @@
package com.lovenav.utils;

public class RedisKeyUtils {

    /** Key segment separator */
    private static final String SPLIT = ":";
    private static final String SEARCH = "search";
    private static final String SEARCH_HISTORY = "search-history";
    private static final String HOT_SEARCH = "hot-search";
    private static final String SEARCH_TIME = "search-time";

    /** Hash holding one user's personal search history */
    public static String getSearchHistoryKey(String userId) {
        return SEARCH + SPLIT + SEARCH_HISTORY + SPLIT + userId;
    }

    /** Zset holding the global hot-search ranking */
    public static String getHotSearchKey() {
        return SEARCH + SPLIT + HOT_SEARCH;
    }

    /** String key-value holding the last-searched timestamp of one keyword */
    public static String getSearchTimeKey(String searchKey) {
        return SEARCH + SPLIT + SEARCH_TIME + SPLIT + searchKey;
    }
}
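For a hypothetical user id "42" and keyword "java", these helpers yield search:search-history:42 (the hash holding that user's history), search:hot-search (the global hotness zset), and search:search-time:java (that keyword's last-searched timestamp).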

File diff suppressed because it is too large