fix:数据总览 接口性能优化

This commit is contained in:
2026-03-16 11:22:48 +08:00
parent c066fe2c42
commit c00657b426
7 changed files with 345 additions and 119 deletions

View File

@@ -12,6 +12,7 @@ import com.sdm.data.model.req.*;
import com.sdm.data.service.IDimensionTemplateService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PostMapping;
@@ -31,6 +32,7 @@ import java.util.List;
@RestController
@RequestMapping("/dataOverView")
@Tag(name = "数据总览", description = "项目维度文件数据展示")
@Slf4j
public class DataOverviewController {
@Autowired
IDimensionTemplateService dimensionTemplateService;
@@ -47,7 +49,7 @@ public class DataOverviewController {
long start = System.currentTimeMillis();
SdmResponse<List<FileMetadataChildrenDTO>> simulationNodeTree = dimensionTemplateService.getSimulationNodeTree(req);
long end = System.currentTimeMillis();
System.out.println("getSimulationNodeTree方法耗时" + (end - start) + "毫秒,入参:" + req.toString());
log.info("getSimulationNodeTree方法耗时" + (end - start) + "毫秒,入参:" + req.toString());
return simulationNodeTree;
}

View File

@@ -15,7 +15,7 @@ import java.util.List;
public class FileMetadataChildrenDTO extends FileMetadataInfoResp {
@Schema(description = "子节点列表")
private List<FileMetadataInfoResp> mergeSameNameChildren;
private List<FileMetadataChildrenDTO> mergeSameNameChildren;
/**
* 聚合文件ID列表

View File

@@ -3,6 +3,8 @@ package com.sdm.data.service;
import com.sdm.common.entity.enums.FilePermissionEnum;
import com.sdm.data.model.entity.FileUserPermission;
import com.baomidou.mybatisplus.extension.service.IService;
import java.util.List;
import java.util.Map;
/**
* <p>
@@ -29,4 +31,12 @@ public interface IFileUserPermissionService extends IService<FileUserPermission>
* @return 权限位掩码
*/
Integer getMergedPermission(Long fileId, Long userId);
/**
 * Batch lookup of merged permission bitmasks for multiple files.
 *
 * @param fileIds file ids to resolve; a null or empty list yields an empty map
 * @param userId  id of the user whose permissions are merged
 * @return map of file id -> merged permission bitmask
 */
Map<Long, Integer> getMergedPermissionBatch(List<Long> fileIds, Long userId);
}

View File

@@ -369,77 +369,72 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
*/
private List<FileMetadataChildrenDTO> mergeNodeDirInfos(List<FileMetadataInfo> nodeDirInfos) {
// 设置节点的nodeCode
long start1 = System.currentTimeMillis();
Map<String, String> nodeCodeMap = getNodeCodeMapByUuid(nodeDirInfos.stream().map(FileMetadataInfo::getRelatedResourceUuid).distinct().toList());
nodeDirInfos.forEach(item -> item.setNodeCode(nodeCodeMap.get(item.getRelatedResourceUuid())));
// 按originalName分组
Map<String, List<FileMetadataInfo>> groupedByOriginalName = nodeDirInfos.stream()
.collect(Collectors.groupingBy(FileMetadataInfo::getOriginalName));
List<FileMetadataChildrenDTO> result = new ArrayList<>();
// 对每组进行处理
for (Map.Entry<String, List<FileMetadataInfo>> entry : groupedByOriginalName.entrySet()) {
List<FileMetadataInfo> group = entry.getValue();
if (!group.isEmpty()) {
// 使用第一个作为基础对象
FileMetadataInfo baseInfo = group.get(0);
// 创建包含children的DTO对象
FileMetadataChildrenDTO dto = FileMetadataConvert.INSTANCE.convertToFileMetadataChildrenDTO(baseInfo);
// 设置文件的节点信息tag1-tag10
hierarchyHelper.setTagReqFromFileMetadata(baseInfo, dto);
// 填充children的文件类型标签信息
fileDictTagQueryService.fillFileTagsForRespList(Collections.singletonList(dto), FileMetadataInfoResp::getId);
// 转换children列表并为每个child设置totalName
List<FileMetadataInfoResp> children = new ArrayList<>();
for (FileMetadataInfo fileInfo : group) {
FileMetadataChildrenDTO childDto = FileMetadataConvert.INSTANCE.convertToFileMetadataChildrenDTO(fileInfo);
// 设置文件的节点信息tag1-tag10
hierarchyHelper.setTagReqFromFileMetadata(fileInfo, childDto);
// 设置totalName从bucketName中提取
String objectKey = fileInfo.getObjectKey();
String baseDirPath = DirTypeEnum.PROJECT_NODE_DIR.getDirName() + "/";
if (objectKey != null && objectKey.startsWith(baseDirPath)) {
String totalName = objectKey.substring(baseDirPath.length());
childDto.setTotalName(totalName);
}
// 目前已经移除了学科节点学科信息作为task节点的字段附属信息
if(ObjectUtils.isNotEmpty(dto.getOwntaskId()) ||
(NodeTypeEnum.TASK.getValue().equalsIgnoreCase(dto.getRelatedResourceUuidOwnType())&&ObjectUtils.isNotEmpty(dto.getRelatedResourceUuid()))){
GetTaskDetailReq getTaskDetailReq = new GetTaskDetailReq();
getTaskDetailReq.setRelatedResourceUuid(ObjectUtils.isNotEmpty(dto.getOwntaskId()) ? dto.getOwntaskId() : dto.getRelatedResourceUuid());
SdmResponse<SpdmTaskVo> taskDetail = simulationTaskFeignClient.getTaskDetail(getTaskDetailReq);
if(taskDetail.isSuccess()){
childDto.setOwnDisciplineName(taskDetail.getData().getDiscipline());
}
}
childDto.setPermissionValue(fileUserPermissionService.getMergedPermission(fileInfo.getId(), ThreadLocalContext.getUserId()));
children.add(childDto);
}
// 填充children的文件类型标签信息
fileDictTagQueryService.fillFileTagsForRespList(children, FileMetadataInfoResp::getId);
// 目前已经移除了学科节点学科信息作为task节点的字段附属信息
if(ObjectUtils.isNotEmpty(dto.getOwntaskId()) ||
(NodeTypeEnum.TASK.getValue().equalsIgnoreCase(dto.getRelatedResourceUuidOwnType())&&ObjectUtils.isNotEmpty(dto.getRelatedResourceUuid()))){
GetTaskDetailReq getTaskDetailReq = new GetTaskDetailReq();
getTaskDetailReq.setRelatedResourceUuid(ObjectUtils.isNotEmpty(dto.getOwntaskId()) ? dto.getOwntaskId() : dto.getRelatedResourceUuid());
SdmResponse<SpdmTaskVo> taskDetail = simulationTaskFeignClient.getTaskDetail(getTaskDetailReq);
if(taskDetail.isSuccess()){
dto.setOwnDisciplineName(taskDetail.getData().getDiscipline());
}
}
dto.setPermissionValue(fileUserPermissionService.getMergedPermission(baseInfo.getId(), ThreadLocalContext.getUserId()));
dto.setMergeSameNameChildren(children);
dto.setFileIds(children.stream().map(FileMetadataInfoResp::getId).toList());
result.add(dto);
long start2 = System.currentTimeMillis();
// 先把所有FileMetadataInfo转换成FileMetadataChildrenDTO后续统一做批量设置
List<FileMetadataInfoResp> allChildren = new ArrayList<>();
Map<String, List<FileMetadataChildrenDTO>> groupedChildren = new HashMap<>();
for (FileMetadataInfo fileInfo : nodeDirInfos) {
FileMetadataChildrenDTO childDto = FileMetadataConvert.INSTANCE.convertToFileMetadataChildrenDTO(fileInfo);
// 设置totalName从bucketName中提取
String objectKey = fileInfo.getObjectKey();
String baseDirPath = DirTypeEnum.PROJECT_NODE_DIR.getDirName() + "/";
if (objectKey != null && objectKey.startsWith(baseDirPath)) {
String totalName = objectKey.substring(baseDirPath.length());
childDto.setTotalName(totalName);
}
// 目前已经移除了学科节点学科信息作为task节点的字段附属信息
if (ObjectUtils.isNotEmpty(childDto.getOwntaskId()) ||
(NodeTypeEnum.TASK.getValue().equalsIgnoreCase(childDto.getRelatedResourceUuidOwnType()) && ObjectUtils.isNotEmpty(childDto.getRelatedResourceUuid()))) {
GetTaskDetailReq getTaskDetailReq = new GetTaskDetailReq();
getTaskDetailReq.setRelatedResourceUuid(ObjectUtils.isNotEmpty(childDto.getOwntaskId()) ? childDto.getOwntaskId() : childDto.getRelatedResourceUuid());
SdmResponse<SpdmTaskVo> taskDetail = simulationTaskFeignClient.getTaskDetail(getTaskDetailReq);
if (taskDetail.isSuccess()) {
childDto.setOwnDisciplineName(taskDetail.getData().getDiscipline());
}
}
allChildren.add(childDto);
groupedChildren.computeIfAbsent(fileInfo.getOriginalName(), key -> new ArrayList<>()).add(childDto);
}
long start3 = System.currentTimeMillis();
// 批量设置文件权限
List<Long> permissionFileIds = allChildren.stream()
.map(FileMetadataInfoResp::getId)
.filter(Objects::nonNull)
.toList();
Map<Long, Integer> permissionMap = fileUserPermissionService.getMergedPermissionBatch(
permissionFileIds,
ThreadLocalContext.getUserId()
);
for (FileMetadataInfoResp childDto : allChildren) {
Integer permissionValue = permissionMap.get(childDto.getId());
childDto.setPermissionValue(permissionValue == null ? 0 : permissionValue);
}
long start4= System.currentTimeMillis();
// 批量设置文件的节点信息tag1-tag10
hierarchyHelper.setTagReqFromFileMetadataBatch(allChildren, FileMetadataInfoResp::getId);
long start5 = System.currentTimeMillis();
// 批量填充文件类型标签信息
fileDictTagQueryService.fillFileTagsForRespList(allChildren, FileMetadataInfoResp::getId);
long start6 = System.currentTimeMillis();
// 设置聚合结果
List<FileMetadataChildrenDTO> result = new ArrayList<>();
groupedChildren.values().forEach(children -> {
FileMetadataChildrenDTO baseDto = new FileMetadataChildrenDTO();
BeanUtils.copyProperties(children.get(0),baseDto);
baseDto.setMergeSameNameChildren(children);
baseDto.setFileIds(children.stream().map(FileMetadataInfoResp::getId).toList());
result.add(baseDto);
});
if(CollectionUtils.isNotEmpty(result)) {
// 判断是否存在PHASE类型的节点
boolean hasPhaseNode = result.stream()
@@ -480,6 +475,14 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
return maxCreateTime2.compareTo(maxCreateTime1); // 倒序排序
});
}
log.info("getSimulationNodeTree 方法耗时 " +
"设置节点的 nodeCode" + (start2-start1) + "毫秒," +
"批量转换成FileMetadataChildrenDTO" + (start3-start2) + "毫秒," +
"批量设置文件权限:" + (start4-start3) + "毫秒," +
"批量设置文件的节点信息tag1-tag10" + (start5-start4) + "毫秒," +
"批量填充文件类型标签信息:" + (start6-start5) + "毫秒," +
"结果排序:" + (System.currentTimeMillis() - start6) + "毫秒");
return result;
}
@@ -542,7 +545,8 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
if(NodeTypeEnum.TASK.getValue().equalsIgnoreCase(nodeType)){
runFileInfos = fileMetadataInfoService.lambdaQuery()
.eq(FileMetadataInfo::getRelatedResourceUuidOwnType, NodeTypeEnum.RUN.getValue())
.in(FileMetadataInfo::getParentId, fileIds).list();
.in(FileMetadataInfo::getParentId, fileIds)
.isNull(FileMetadataInfo::getDeletedAt).list();
}else if(NodeTypeEnum.RUN.getValue().equalsIgnoreCase(nodeType)){
runFileInfos = nodeFileInfos;
} else if (tagMapService.isNodeType(nodeType)) {

View File

@@ -14,6 +14,7 @@ import org.springframework.stereotype.Component;
import java.math.BigDecimal;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
@@ -304,6 +305,50 @@ public class FileMetadataHierarchyHelper {
}
}
/**
 * Batch variant of the single-item tagReq setter: enriches a whole list of
 * response objects while hitting the metadata tables only once, instead of
 * issuing one lookup per response.
 *
 * @param respList response objects to enrich; nothing happens when empty
 * @param idGetter extracts the file id from a response object; nothing happens when null
 * @param <T>      response object type
 */
public <T> void setTagReqFromFileMetadataBatch(List<T> respList, Function<T, Long> idGetter) {
    if (idGetter == null || CollectionUtils.isEmpty(respList)) {
        return;
    }
    // Gather the distinct, non-null file ids in encounter order.
    Set<Long> idSet = new LinkedHashSet<>();
    for (T resp : respList) {
        Long id = idGetter.apply(resp);
        if (id != null) {
            idSet.add(id);
        }
    }
    if (idSet.isEmpty()) {
        return;
    }
    List<FileMetadataInfo> files = getFileMetadataByIds(new ArrayList<>(idSet));
    if (CollectionUtils.isEmpty(files)) {
        return;
    }
    // Index the rows by id; on duplicate ids the first row wins.
    Map<Long, FileMetadataInfo> byId = new HashMap<>();
    for (FileMetadataInfo file : files) {
        byId.putIfAbsent(file.getId(), file);
    }
    // One shared uuid -> name map serves every response in the batch.
    Map<String, String> uuidNameMap = buildUuidNameMap(files);
    for (T resp : respList) {
        Long fileId = idGetter.apply(resp);
        FileMetadataInfo file = fileId == null ? null : byId.get(fileId);
        if (file != null) {
            setTagReqFromFileMetadataInternal(file, resp, uuidNameMap);
        }
    }
}
/**
* 直接使用FileMetadataInfo中已存储的tag1~tag10、taskId、runId构建并设置tagReq
* 并基于relatedResourceUuid查询对应originalName填充tagName/taskName/runName
@@ -312,6 +357,54 @@ public class FileMetadataHierarchyHelper {
if (Objects.isNull(file) || Objects.isNull(resp)) {
return;
}
Map<String, String> uuidNameMap = buildUuidNameMap(Collections.singletonList(file));
setTagReqFromFileMetadataInternal(file, resp, uuidNameMap);
}
/**
 * Collects every related-resource UUID referenced by the given files (the
 * tag1..tag10 chains plus taskId and runId) and resolves all of them to their
 * originalName with a single tenant-scoped query.
 *
 * @param files file metadata rows to scan; null entries are skipped
 * @return map of relatedResourceUuid -> originalName (first match wins on
 *         duplicates); empty when no UUIDs are referenced
 */
private Map<String, String> buildUuidNameMap(List<FileMetadataInfo> files) {
Set<String> relatedUuids = new HashSet<>();
for (FileMetadataInfo file : files) {
if (file == null) {
continue;
}
try {
// tag1..tag10 are read reflectively; each value may encode a chain of UUIDs.
for (int i = 1; i <= 10; i++) {
Object tagValue = FileMetadataInfo.class.getMethod("getTag" + i).invoke(file);
String value = Objects.toString(tagValue, null);
parseUuidChain(value).forEach(relatedUuids::add);
}
} catch (Exception e) {
// A reflection failure on one file must not break the batch; log and continue.
log.warn("解析tag值失败", e);
}
if (StringUtils.isNotBlank(file.getTaskId())) {
relatedUuids.add(file.getTaskId());
}
if (StringUtils.isNotBlank(file.getRunId())) {
relatedUuids.add(file.getRunId());
}
}
if (CollectionUtils.isEmpty(relatedUuids)) {
return Collections.emptyMap();
}
// One lookup for all UUIDs; scoped to the current tenant, soft-deleted rows excluded.
List<FileMetadataInfo> tagNameFiles = fileMetadataInfoService.lambdaQuery()
.eq(FileMetadataInfo::getTenantId, ThreadLocalContext.getTenantId())
.in(FileMetadataInfo::getRelatedResourceUuid, relatedUuids)
.isNull(FileMetadataInfo::getDeletedAt)
.list();
return tagNameFiles.stream()
.filter(Objects::nonNull)
.filter(item -> StringUtils.isNotBlank(item.getRelatedResourceUuid()))
.collect(Collectors.toMap(
FileMetadataInfo::getRelatedResourceUuid,
FileMetadataInfo::getOriginalName,
// Duplicate UUIDs keep the first name encountered.
(left, right) -> left
));
}
private <T> void setTagReqFromFileMetadataInternal(FileMetadataInfo file, T resp, Map<String, String> uuidNameMap) {
try {
Class<?> tagReqClass = Class.forName("com.sdm.common.entity.req.data.TagReq");
Object tagReq = tagReqClass.getDeclaredConstructor().newInstance();
@@ -329,35 +422,6 @@ public class FileMetadataHierarchyHelper {
tagReqClass.getMethod("setTaskId", String.class).invoke(tagReq, taskId);
tagReqClass.getMethod("setRunId", String.class).invoke(tagReq, runId);
Set<String> relatedUuids = new HashSet<>();
for (String tagValue : tagValues) {
parseUuidChain(tagValue).forEach(relatedUuids::add);
}
if (StringUtils.isNotBlank(taskId)) {
relatedUuids.add(taskId);
}
if (StringUtils.isNotBlank(runId)) {
relatedUuids.add(runId);
}
Map<String, String> uuidNameMap = new HashMap<>();
if (CollectionUtils.isNotEmpty(relatedUuids)) {
List<FileMetadataInfo> tagNameFiles = fileMetadataInfoService.lambdaQuery()
.eq(FileMetadataInfo::getTenantId, ThreadLocalContext.getTenantId())
.in(FileMetadataInfo::getRelatedResourceUuid, relatedUuids)
.isNull(FileMetadataInfo::getDeletedAt)
.list();
uuidNameMap = tagNameFiles.stream()
.filter(Objects::nonNull)
.filter(item -> StringUtils.isNotBlank(item.getRelatedResourceUuid()))
.collect(Collectors.toMap(
FileMetadataInfo::getRelatedResourceUuid,
FileMetadataInfo::getOriginalName,
(left, right) -> left
));
}
for (int i = 1; i <= 10; i++) {
String tagValue = tagValues.get(i - 1);
List<String> uuidChain = parseUuidChain(tagValue);

View File

@@ -17,7 +17,14 @@ import org.apache.commons.lang3.ObjectUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* <p>
@@ -158,4 +165,139 @@ public class FileUserPermissionServiceImpl extends ServiceImpl<FileUserPermissio
return currentPerm;
}
@Override
public Map<Long, Integer> getMergedPermissionBatch(List<Long> fileIds, Long userId) {
    // Batch counterpart of getMergedPermission: loads every requested node and all
    // of its ancestors up front, then merges permissions bottom-up in memory.
    Map<Long, Integer> result = new HashMap<>();
    if (fileIds == null || fileIds.isEmpty() || userId == null) {
        return result;
    }
    // Super admins get full permission on every requested file.
    if (isSuperAdmin(userId)) {
        for (Long fileId : fileIds) {
            if (fileId != null) {
                result.put(fileId, (int) FilePermissionEnum.ALL.getValue());
            }
        }
        return result;
    }
    // pendingIds: nodes whose metadata has not been loaded yet (inputs + discovered parents).
    LinkedHashSet<Long> pendingIds = new LinkedHashSet<>(fileIds);
    // processedIds: nodes already loaded, so shared ancestors are queried only once.
    Set<Long> processedIds = new HashSet<>();
    // parentMap: node id -> parent id, drives the bottom-up permission merge.
    Map<Long, Long> parentMap = new HashMap<>();
    // mergedPermCache: memoized merged permission per node. Base directories are
    // seeded here (not in result) so their ALL value propagates to descendants
    // without leaking ancestor ids into the returned map.
    Map<Long, Integer> mergedPermCache = new HashMap<>();
    // Walk upward in batches until every ancestor chain is fully resolved.
    while (!pendingIds.isEmpty()) {
        List<Long> batchIds = new ArrayList<>(pendingIds);
        pendingIds.clear();
        List<FileMetadataInfo> fileInfos = fileMetadataInfoService.listByIds(batchIds);
        for (FileMetadataInfo fileInfo : fileInfos) {
            if (fileInfo == null || fileInfo.getId() == null) {
                continue;
            }
            Long id = fileInfo.getId();
            // Set.add returning false means this node was already handled.
            if (!processedIds.add(id)) {
                continue;
            }
            parentMap.put(id, fileInfo.getParentId());
            // Base directories always carry full permission.
            for (DirTypeEnum dirType : DirTypeEnum.getInitSpmdDir()) {
                if (dirType.getDirName().equals(fileInfo.getOriginalName())) {
                    mergedPermCache.put(id, (int) FilePermissionEnum.ALL.getValue());
                    break;
                }
            }
            // Queue the parent for the next round unless already processed.
            Long parentId = fileInfo.getParentId();
            if (parentId != null && parentId > 0 && !processedIds.contains(parentId)) {
                pendingIds.add(parentId);
            }
        }
    }
    // Load all explicit grants for every node on any chain in one query.
    // Guard the IN clause: MyBatis-Plus generates invalid SQL for an empty list
    // (which happens when none of the input ids exist in metadata).
    Map<Long, Integer> explicitPermMap = new HashMap<>();
    if (!processedIds.isEmpty()) {
        List<FileUserPermission> userPermRecords = this.lambdaQuery()
                .eq(FileUserPermission::getUserId, userId)
                .in(FileUserPermission::getTFilemetaId, new ArrayList<>(processedIds))
                .list();
        for (FileUserPermission record : userPermRecords) {
            if (record.getTFilemetaId() != null) {
                explicitPermMap.put(record.getTFilemetaId(), (int) record.getPermission());
            }
        }
    }
    // Merge each node's permission with its ancestors', memoized in mergedPermCache.
    for (Long id : processedIds) {
        if (!mergedPermCache.containsKey(id)) {
            mergedPermCache.put(id, calculateMergedPermissionFromCache(
                    id,
                    explicitPermMap,
                    parentMap,
                    mergedPermCache
            ));
        }
    }
    // Fill the result for exactly the requested ids (ancestors stay internal);
    // ids with no metadata row fall back to zero permission.
    for (Long fileId : fileIds) {
        if (fileId == null) {
            continue;
        }
        Integer value = mergedPermCache.get(fileId);
        result.put(fileId, value == null ? (int) FilePermissionEnum.ZERO.getValue() : value);
    }
    return result;
}
/**
 * Resolves the merged permission of a node by OR-ing its own explicit grant
 * (defaulting to the READ bit) with its parent's merged permission, memoizing
 * every intermediate result in {@code mergedPermCache}.
 */
private int calculateMergedPermissionFromCache(Long fileId,
                                               Map<Long, Integer> explicitPermMap,
                                               Map<Long, Long> parentMap,
                                               Map<Long, Integer> mergedPermCache) {
    Integer memoized = mergedPermCache.get(fileId);
    if (memoized != null) {
        return memoized;
    }
    // No explicit grant means the node falls back to READ.
    int ownPerm = explicitPermMap.getOrDefault(fileId, (int) FilePermissionEnum.READ.getValue());
    Long parentId = parentMap.get(fileId);
    boolean isRoot = parentId == null || parentId <= 0;
    int merged = isRoot
            ? ownPerm
            : (ownPerm | calculateMergedPermissionFromCache(parentId, explicitPermMap, parentMap, mergedPermCache));
    mergedPermCache.put(fileId, merged);
    return merged;
}
}

View File

@@ -28,15 +28,19 @@
<!-- 第一个子查询:文件(必执行) -->
SELECT distinct file_metadata_info.*
FROM file_metadata_info
WHERE parentId IN (
<foreach collection="parentIds" item="parentId" separator=",">
#{parentId}
</foreach>
)
and tenantId = #{tenantId}
AND dataType = 2
AND isLatest = true
AND deletedAt IS NULL
<where>
<if test="parentIds != null and parentIds.size() > 0">
parentId IN (
<foreach collection="parentIds" item="parentId" separator=",">
#{parentId}
</foreach>
)
</if>
and tenantId = #{tenantId}
AND dataType = 2
AND isLatest = true
AND deletedAt IS NULL
</where>
<!-- 动态判断uuids 不为空且有元素时,才拼接 UNION ALL + 第二个子查询:普通文件夹和节点文件夹 -->
<if test="fileIds != null and fileIds.size() > 0">
@@ -47,20 +51,20 @@
<if test="filterEmptyData != null and filterEmptyData">
LEFT JOIN file_storage ON file_metadata_info.id = file_storage.dirId
</if>
WHERE
file_metadata_info.tenantId = #{tenantId}
<where>
file_metadata_info.tenantId = #{tenantId}
and
file_metadata_info.id IN (
<foreach collection="fileIds" item="fileId" separator=",">
#{fileId}
</foreach>
)
AND file_metadata_info.deletedAt IS NULL
and
file_metadata_info.id IN (
<foreach collection="fileIds" item="fileId" separator=",">
#{fileId}
</foreach>
)
AND file_metadata_info.deletedAt IS NULL
<if test="filterEmptyData != null and filterEmptyData">
AND file_storage.fileId IS NOT NULL
</if>
<if test="filterEmptyData != null and filterEmptyData">
AND file_storage.fileId IS NOT NULL
</if>
</where>
)
</if>