package com.ruoyi.article.listener;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.read.listener.ReadListener;
import com.alibaba.excel.util.ListUtils;
import com.alibaba.fastjson2.JSON;
import com.ruoyi.article.controller.management.dto.MgtSensitiveWordsDTO;
import com.ruoyi.article.domain.SensitiveWords;
import com.ruoyi.article.service.ISensitiveWordsService;
import com.ruoyi.common.core.utils.page.BeanUtils;
import java.util.List;
import lombok.extern.slf4j.Slf4j;

/**
 * EasyExcel read listener that imports sensitive-word rows in batches.
 *
 * @author mitao
 * @date 2024/9/11
 */
@Slf4j
|
public class SensitiveWordsListener implements ReadListener<MgtSensitiveWordsDTO> {
|
|
/**
|
* 每隔100条存储数据库,实际使用中可以100条,然后清理list ,方便内存回收
|
*/
|
private static final int BATCH_COUNT = 100;
|
|
/**
|
* 缓存的数据
|
*/
|
private List<MgtSensitiveWordsDTO> cachedDataList = ListUtils.newArrayListWithExpectedSize(
|
BATCH_COUNT);
|
|
private ISensitiveWordsService sensitiveWordsService;
|
|
/**
|
* 如果使用了spring,请使用这个构造方法。每次创建Listener的时候需要把spring管理的类传进来
|
*
|
* @param sensitiveWordsService
|
*/
|
public SensitiveWordsListener(ISensitiveWordsService sensitiveWordsService) {
|
this.sensitiveWordsService = sensitiveWordsService;
|
}
|
|
@Override
|
public void invoke(MgtSensitiveWordsDTO data, AnalysisContext analysisContext) {
|
log.info("解析到一条数据:{}", JSON.toJSONString(data));
|
cachedDataList.add(data);
|
// 达到BATCH_COUNT了,需要去存储一次数据库,防止数据几万条数据在内存,容易OOM
|
if (cachedDataList.size() >= BATCH_COUNT) {
|
saveData();
|
// 存储完成清理 list
|
cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);
|
}
|
}
|
|
private void saveData() {
|
log.info("{}条数据,开始存储数据库!", cachedDataList.size());
|
// 拷贝数据
|
List<SensitiveWords> sensitiveWords = BeanUtils.copyList(cachedDataList,
|
SensitiveWords.class);
|
// 批量存入数据库
|
sensitiveWordsService.saveBatch(sensitiveWords);
|
}
|
|
@Override
|
public void doAfterAllAnalysed(AnalysisContext analysisContext) {
|
// 这里也要保存数据,确保最后遗留的数据也存储到数据库
|
saveData();
|
log.info("所有数据解析完成!");
|
}
|
}
|