compose-analysis/src/main/java/com/keyware/composeanalysis/task/LineAnalysisTask.java

package com.keyware.composeanalysis.task;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.lang.Pair;
import cn.hutool.core.util.StrUtil;
import com.keyware.common.constant.enums.AnalysisStatusEnum;
import com.keyware.composeanalysis.constant.FixedValue;
import com.keyware.composeanalysis.constant.FunctionAndAnalysisAssemblyConst;
import com.keyware.composeanalysis.constant.RedisConst;
import com.keyware.composeanalysis.constant.SolrDBConst;
import com.keyware.composeanalysis.constant.enums.AnalysisLevelEnum;
import com.keyware.composeanalysis.constant.enums.FileAnalysisStatusEnum;
import com.keyware.composeanalysis.entity.AnalysisTask;
import com.keyware.composeanalysis.mongo.FileDataMongoDto;
import com.keyware.composeanalysis.mongo.LineDataMongoDto;
import com.keyware.composeanalysis.mongo.MatchOpenFile;
import com.keyware.composeanalysis.mongo.MatchOpenFileMongoDto;
import com.keyware.composeanalysis.solr.VersionTree;
import com.keyware.composeanalysis.util.*;
import com.keyware.keyswan.anaysis.Analysis;
import com.keyware.keyswan.anaysis.AnalysisFactory;
import com.keyware.keyswan.common.CodeFile;
import com.keyware.utils.IdGenerator;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Update;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.stream.Collectors;
import static org.springframework.data.mongodb.core.query.Criteria.where;
/**
* @author liuzongren
* @ClassName LineAnalysisTask
* @description: line-level feature extraction task
* @datetime 2024-07-25 16:19
* @version: 1.0
*/
@Log4j2
public class LineAnalysisTask extends IAnalysisTask {
private MongoTemplate mongoTemplate;
private AnalysisTask analysisTask;
// file info of the file under analysis
private FileDataMongoDto analysisFile;
private SolrUtils solrUtils;
private RedisUtil redisUtil;
private CountDownLatch countDownLatch;
public LineAnalysisTask(AnalysisTask analysisTask, FileDataMongoDto analysisFile, MongoTemplate mongoTemplate, CountDownLatch countDownLatch) {
this.mongoTemplate = mongoTemplate;
this.analysisTask = analysisTask;
this.analysisFile = analysisFile;
this.countDownLatch = countDownLatch;
this.solrUtils = SpringContextUtils.getBean(SolrUtils.class);
this.redisUtil = SpringContextUtils.getBean(RedisUtil.class);
}
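// Usage sketch (assumption, not part of this class): the caller is expected to submit one task per
// file after file-level analysis and wait on the shared latch, e.g. assuming IAnalysisTask is a Runnable:
//   CountDownLatch latch = new CountDownLatch(files.size());
//   files.forEach(f -> executor.execute(new LineAnalysisTask(task, f, mongoTemplate, latch)));
//   latch.await();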
/**
* Line-level source code provenance analysis.
* This task must run after the file-level analysis has completed.
*/
@Override
public void run() {
// before running, check the current execution status of the task
Object status = redisUtil.get(String.format(RedisConst.TASK_RUNNING_STATUS_KEY_PREFIX, analysisTask.getId()));
if (status != null && (status.equals(AnalysisStatusEnum.STOP_ANALYSIS.getCode()) || status.equals(AnalysisStatusEnum.PAUSE_ANALYSIS.getCode()))) {
log.info("任务已取消,fileName:{}", analysisFile.getName());
countDownLatch.countDown();
return;
}
// get the file path
String filePath = analysisFile.getFileUrl();
// get the file name
String fileName = analysisFile.getName();
AnalysisLogUtil.insert(mongoTemplate, "[Line-level feature extraction] extracting " + fileName);
try {
Analysis analysis = AnalysisFactory.getAnalysis(filePath);
// extract the line-level feature MD5s of the file
CodeFile codeFile = analysis.analysisFile(filePath, FunctionAndAnalysisAssemblyConst.LINE_EXTRACT, FunctionAndAnalysisAssemblyConst.LINE_EXTRACT);
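// codeFile is expected to expose two comma-separated MD5 lists used below:
// cutFileLineMd5 (MD5 of each raw line) and traitFileLineMd5 (MD5 of each feature line)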
// query solr for files with similar line features
SolrDocumentList featureSimilarityFromSolr = getFeatureSimilarityFromSolr(codeFile);
// calculate the open source rate of the file
doAnalysis(featureSimilarityFromSolr, codeFile);
// update the file's analysis status: line-level feature analysis is complete
analysisFile.setFileAnalysisStatus(FileAnalysisStatusEnum.ANALYSIS_DONE.getCode());
mongoTemplate.update(FileDataMongoDto.class)
.matching(where("_id").is(analysisFile.getId()))
.replaceWith(analysisFile)
.findAndReplace();
// insert an analysis log entry
AnalysisLogUtil.insert(mongoTemplate, "[Line-level analysis] completed " + fileName);
log.info("文件" + fileName + ":行级分析完成");
} catch (Exception e) {
AnalysisLogUtil.insertErrorInfo(mongoTemplate, "[Line-level analysis] failed " + fileName, e);
log.error("File: {} line-level analysis failed!", fileName, e);
// set the current file's analysis status to failed
mongoTemplate.update(FileDataMongoDto.class)
.matching(where("_id").is(analysisFile.getId()))
.apply(new Update().set("fileAnalysisStatus", FileAnalysisStatusEnum.FAILED_ANALYSIS.getCode()))
.first();
} finally {
countDownLatch.countDown();
}
}
/**
* Calculate the open source rate of the file under analysis
*
* @param matcheOpenSourceFiles open source files matched from solr
* @param codeFile              file parsing result
*/
private void doAnalysis(SolrDocumentList matcheOpenSourceFiles, CodeFile codeFile) {
// determine the solr version core to query based on the file suffix
String versionIdCoreName = FixedValue.SUFFIX_SOLR_VERSION.get(analysisFile.getSuffix());
if (CollectionUtil.isEmpty(matcheOpenSourceFiles)) {
// the line feature library is relatively small, so as a fallback: if the current file failed to parse
// or nothing was matched by line features, query solr once more using the file's MD5
checkByOriginalFileMd5(versionIdCoreName, analysisFile.getMd5());
return;
}
// collect the matched feature line MD5s across all open source files, for the overall feature similarity
Set<String> matchedFeatureMd5 = new HashSet<>();
// collect the matched source row numbers across all open source files, for the total matched line count
Set<Integer> matchLineRowsNum = new HashSet<>();
// get all line feature MD5s of the file
String traitFileLineMd5 = codeFile.getTraitFileLineMd5();
List<String> lineFeatureList = Arrays.asList(traitFileLineMd5.split(","));
// compute the similarity and open source rate against each matched open source file
List<MatchOpenFile> matchOpenFilesRes = calculateSimilarityAndOpenRate(matcheOpenSourceFiles, codeFile, versionIdCoreName, matchLineRowsNum, matchedFeatureMd5);
// compute the overall feature similarity of the file
BigDecimal featureSimilarity = new BigDecimal(matchedFeatureMd5.size()).divide(new BigDecimal(lineFeatureList.size()), 4, RoundingMode.HALF_UP).multiply(new BigDecimal(100)).setScale(2);
// compute the overall open source rate of the file
BigDecimal openRate = new BigDecimal(matchLineRowsNum.size()).divide(new BigDecimal(analysisFile.getCodeRowNum()), 4, RoundingMode.HALF_UP).multiply(new BigDecimal(100)).setScale(2);
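// e.g. 45 of 150 feature lines matched -> featureSimilarity = 30.00; 120 of 400 code rows matched -> openRate = 30.00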
// get the open source rate threshold
Integer openRateThreshold = analysisTask.getOpenRateThreshold();
// if the open source rate reaches the threshold, mark the current file as open source
if (openRate.compareTo(new BigDecimal(openRateThreshold)) >= 0) {
analysisFile.setOpenType(true);
}
// save the open source info of the current file to mongo
MatchOpenFileMongoDto matchOpenFileMongo = new MatchOpenFileMongoDto();
matchOpenFileMongo.setId(IdGenerator.uuid32())
.setFilePath(analysisFile.getFileUrl())
.setFileName(analysisFile.getName())
.setFeatureSimilarity(featureSimilarity.floatValue())
.setOpenRate(openRate.floatValue())
.setOpenType(analysisFile.getOpenType())
.setMatchOpenFile(matchOpenFilesRes);
log.info("文件" + analysisFile.getName() + ":开源率:" + openRate.floatValue() + ",特征相似度:" + featureSimilarity.floatValue());
mongoTemplate.save(matchOpenFileMongo);
}
/**
* Calculate the feature similarity and open source rate of the current file
*
* @param matchOpenFiles         all open source files matched by feature MD5
* @param fileAnalysisRes        parsing result of the file under analysis
* @param sourceFileBaseCoreName solr core name of the source file base for the current file type
* @param matchLineRowsNum       collects the matched open source row numbers across all open source files
* @param matchFeatureLineMd5s   collects the matched feature line MD5s across all open source files
* @return result list of the matched open source files
*/
private List<MatchOpenFile> calculateSimilarityAndOpenRate(SolrDocumentList matchOpenFiles, CodeFile fileAnalysisRes, String sourceFileBaseCoreName, Set<Integer> matchLineRowsNum, Set<String> matchFeatureLineMd5s) {
// list of matched open source files
List<MatchOpenFile> matchOpenFilesRes = new ArrayList<>();
// first query the version IDs and path info of the open source files by their MD5s
Set<String> openSourceFileMd5s = matchOpenFiles.stream().map(doc -> (String) doc.get("sourceMd5")).collect(Collectors.toSet());
Map<String, SolrDocument> md5VersionInfoMap = solrUtils.batchQueryVersionIdFromSourceFileBaseBySourceMd5(sourceFileBaseCoreName, openSourceFileMd5s);
// query the detailed version info by version ID
// todo: looking up one version's info currently hits two solr cores and the versionTree; optimize later
Set<String> openSourceFileVersionIds = md5VersionInfoMap.values().stream().map(doc -> (String) doc.get("versionId")).collect(Collectors.toSet());
List<VersionTree> versionTrees = solrUtils.queryBatchVersionInfoByVersionIds(openSourceFileVersionIds);
Map<String, VersionTree> versionIdVersionInfoMap = versionTrees.stream().collect(Collectors.toMap(VersionTree::getVersionId, java.util.function.Function.identity()));
String traitFileLineMd5 = fileAnalysisRes.getTraitFileLineMd5();
List<String> lineFeatureList = Arrays.asList(traitFileLineMd5.split(","));
for (SolrDocument openSourceFile : matchOpenFiles) {
// MD5 of the open source file
String openSourceFileMd5 = openSourceFile.getFieldValue("sourceMd5").toString();
// feature line MD5s of the open source file
String lineFeatureMd5s = (String) openSourceFile.get("tz_line_hay");
List<String> openFileLineFeatures = Arrays.asList(lineFeatureMd5s.split(","));
// get the text content of the open source file
String openSourceContent = solrUtils.getOpenFileContentByMd5(openSourceFileMd5);
// number of the current file's feature lines matched by this open source file
int currentFileMatchFeatureLineCount = 0;
// iterate over the current file's line feature MD5s
for (String lineFeatureMd5 : lineFeatureList) {
// compare against the open source file's feature line list
for (String openFileLineFeature : openFileLineFeatures) {
if (lineFeatureMd5.equals(openFileLineFeature)) {
matchFeatureLineMd5s.add(lineFeatureMd5);
currentFileMatchFeatureLineCount++;
}
}
}
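// note: this nested scan is O(n*m) per open source file; building a HashSet from
// openFileLineFeatures would reduce each membership check to O(1)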
// open source rate of the current file against this open source file
Pair<Float, HashSet<Integer>> openRateAndSaveRowNum = SimilarityUtil.getOpenRateAndSaveRowNum(fileAnalysisRes.getSourceFileContent(), openSourceContent);
// record the matched row numbers so the overall open source rate can be computed later
matchLineRowsNum.addAll(openRateAndSaveRowNum.getValue());
// feature similarity of the current file against this open source file
BigDecimal featureSimilarity = new BigDecimal(currentFileMatchFeatureLineCount).divide(new BigDecimal(lineFeatureList.size()), 4, RoundingMode.HALF_UP).multiply(new BigDecimal(100)).setScale(2);
SolrDocument openEntries = md5VersionInfoMap.get(openSourceFileMd5);
VersionTree versionInfo = versionIdVersionInfoMap.get(openEntries.get("versionId"));
if (versionInfo == null) {
log.error("根据版本ID,未查询到相关的版本信息。versionId:{}", openEntries.get("versionId"));
continue;
}
// assemble the open source project info of the current open source file
MatchOpenFile matchOpenFileInfo = new MatchOpenFile();
matchOpenFileInfo.setPId(versionInfo.getProId())
.setPName(versionInfo.getProName())
.setSourceUrl((String) openEntries.get("fullPath"))
.setFeatureSimilarity(featureSimilarity.floatValue())
.setOpenRate(openRateAndSaveRowNum.getKey())
.setVersion(versionInfo.getVersionName())
.setLicenseType(versionInfo.getLicenseType())
.setAnalyzeType(AnalysisLevelEnum.FUNCTION_LEVEL.getCode());
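// note: the analyze type above uses FUNCTION_LEVEL, which looks carried over from the function-level
// task; a line-level value may be intended here, but that is unverified against AnalysisLevelEnum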
matchOpenFilesRes.add(matchOpenFileInfo);
}
return matchOpenFilesRes;
}
/**
* In case the feature library is incomplete, query the open source file info again by the file's MD5
* as a secondary check
*
* @param versionIdCoreName solr core name of the version library to query
* @param originalFileMd5   MD5 of the file under analysis
*/
private void checkByOriginalFileMd5(String versionIdCoreName, String originalFileMd5) {
// query the source file base by the file's MD5 to check whether the current file exists in the open source library
SolrDocument versionIdAndPath = solrUtils.queryOne(versionIdCoreName, "sourceFileMd5:" + originalFileMd5, "versionId,fullPath,sourceFileMd5");
if (versionIdAndPath != null) {
// query the detailed version info by version ID
VersionTree versionInfo = solrUtils.queryVersionInfoByVersionId((String) versionIdAndPath.get("versionId"));
if (versionInfo != null) {
// open source project info of the matched open source file
MatchOpenFile matchOpenFileInfo = new MatchOpenFile();
matchOpenFileInfo.setPId(versionInfo.getProId())
.setPName(versionInfo.getProName())
.setSourceUrl(versionInfo.getDownUrl())
.setFeatureSimilarity(100.00f)
.setOpenRate(100.00f)
.setAnalyzeType(AnalysisLevelEnum.FILE_LEVEL.getCode());
// save the open source info of the current file to mongo
MatchOpenFileMongoDto matchOpenFileMongo = new MatchOpenFileMongoDto();
matchOpenFileMongo.setId(IdGenerator.uuid32())
.setFilePath(analysisFile.getFileUrl())
.setFileName(analysisFile.getName())
.setOpenRate(100.00f)
.setOpenType(analysisFile.getOpenType())
.setMatchOpenFile(Arrays.asList(matchOpenFileInfo));
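// note: unlike doAnalysis, this path does not set featureSimilarity on the mongo document or mark
// analysisFile as open source even though the MD5 match is 100%; verify whether that is intentional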
mongoTemplate.save(matchOpenFileMongo);
}
}
}
/**
* Insert the feature values into mongo
*
* @param features         comma-separated feature MD5s
* @param lineDataMongoDto feature storage record of the current analysis task
* todo check later whether this insert is still necessary
*/
@Deprecated
private void insertFeatureValue(String features, LineDataMongoDto lineDataMongoDto) {
String[] featureMd5Arr = {};
if (StringUtils.isNotBlank(features)) {
featureMd5Arr = features.split(",");
}
List<String> lineFeatures = Arrays.asList(featureMd5Arr);
List<String> batchInsertList = new ArrayList<>();
if (CollectionUtil.isNotEmpty(lineFeatures)) {
// the batch-insert logic here still needs to be verified
// insert in batches; if the parsed data volume is too large, a single document may exceed MongoDB's size limit
int batchInsertStep = 5000;
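// e.g. 12,000 feature MD5s are persisted as three documents of 5,000, 5,000 and 2,000 entries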
int total = 0;
for (int i = 0; i < lineFeatures.size(); i++) {
if (total != batchInsertStep) {
batchInsertList.add(lineFeatures.get(i));
total++;
}
if (i == lineFeatures.size() - 1 && total != batchInsertStep) {
total = 0;
lineDataMongoDto.setId(IdGenerator.uuid32())
.setLineFeatueMd5s(batchInsertList);
mongoTemplate.insert(lineDataMongoDto);
}
if (total == batchInsertStep) {
total = 0;
lineDataMongoDto.setId(IdGenerator.uuid32())
.setLineFeatueMd5s(batchInsertList);
mongoTemplate.insert(lineDataMongoDto);
batchInsertList.clear();
}
}
} else {
lineDataMongoDto.setId(IdGenerator.uuid32());
mongoTemplate.insert(lineDataMongoDto);
}
}
/**
* Retrieve files with similar features from the feature library by feature MD5
*
* @param codeFile line feature info of the file under analysis
* @return matched solr documents
*/
private SolrDocumentList getFeatureSimilarityFromSolr(CodeFile codeFile) {
Set<String> queryMd5Set = new HashSet<>();
// MD5s of each line's original (cut) content
String cutFileLineMd5 = codeFile.getCutFileLineMd5();
if (StrUtil.isNotBlank(cutFileLineMd5)) {
List<String> lineCutList = Arrays.asList(cutFileLineMd5.split(","));
queryMd5Set.addAll(lineCutList);
}
// MD5s of each line's feature content
String traitFileLineMd5 = codeFile.getTraitFileLineMd5();
if (StrUtil.isNotBlank(traitFileLineMd5)) {
List<String> lineFeatureList = Arrays.asList(traitFileLineMd5.split(","));
queryMd5Set.addAll(lineFeatureList);
}
if (CollectionUtil.isEmpty(queryMd5Set)) {
log.error("特征相似度检索失败,特征为空:{}", analysisFile.getName());
return new SolrDocumentList();
}
// build the line-feature query condition
String queryStr = "tz_line_hay:(" + StringUtils.join(queryMd5Set, " OR ") + ")";
log.info("查询条件: solrCoreName:{},queryStr:{}", SolrDBConst.CORE_NAME_SOURCE_FILE_INFO_TEMP, queryStr);
SolrDocumentList result = solrUtils.query(SolrDBConst.CORE_NAME_SOURCE_FILE_INFO_TEMP, queryStr, "sourceMd5,tz_line_hay");
return result;
}
}