数据总览 数据检索报错 优化

数据总览、数据查询、数据存储 全量展示文件所属项目节点信息
This commit is contained in:
2026-01-05 16:23:33 +08:00
parent 5981cdad6c
commit a7a043ac09
13 changed files with 5812 additions and 234 deletions

View File

@@ -0,0 +1,54 @@
package com.sdm.common.entity.resp;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
@Data
public class BaseResp {

    // Human-readable, pre-formatted file size (populated by callers, e.g. FileSizeUtils).
    @Schema(description = "格式化后的文件大小")
    private String formatFileSize;

    // Owning hierarchy nodes, resolved by walking the file's parent-directory chain.
    // NOTE(review): names like "owntaskName" keep their original casing because the
    // Lombok-generated accessors (setOwntaskName/getOwntaskName) are already used by
    // callers; renaming to "ownTaskName" would break them.
    @Schema(description = "所属项目")
    private String ownProjectName;

    @Schema(description = "所属项目id")
    private String ownProjectId;

    @Schema(description = "所属阶段")
    private String ownPhaseName;

    @Schema(description = "所属阶段id")
    private String ownPhaseId;

    @Schema(description = "所属学科")
    private String ownDisciplineName;

    @Schema(description = "所属学科id")
    private String ownDisciplineId;

    @Schema(description = "所属机器")
    private String ownMachineName;

    @Schema(description = "所属机器id")
    private String ownMachineId;

    @Schema(description = "所属工作空间")
    private String ownWorkspaceName;

    @Schema(description = "所属工作空间id")
    private String ownWorkspaceId;

    @Schema(description = "所属工况任务")
    private String owntaskName;

    @Schema(description = "所属工况任务id")
    private String owntaskId;

    @Schema(description = "所属算列")
    private String ownRunName;

    @Schema(description = "所属算列id")
    private String ownRunId;
}

View File

@@ -3,6 +3,7 @@ package com.sdm.common.entity.resp.data;
import com.baomidou.mybatisplus.annotation.TableField;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.sdm.common.entity.resp.BaseResp;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
@@ -16,7 +17,7 @@ import java.util.List;
@Data
@Schema(name = "FileMetadataDTO", description = "文件元数据传输对象")
@JsonIgnoreProperties(ignoreUnknown = true)
public class FileMetadataInfoResp implements Serializable {
public class FileMetadataInfoResp extends BaseResp implements Serializable {
private static final long serialVersionUID = 1L;

View File

@@ -1,6 +1,7 @@
package com.sdm.common.entity.resp.data;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.sdm.common.entity.resp.BaseResp;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
@@ -11,7 +12,7 @@ import java.time.LocalDateTime;
*/
@Data
@Schema(description = "任务结果曲线响应对象")
public class SimulationTaskResultCurveResp {
public class SimulationTaskResultCurveResp extends BaseResp {
@Schema(description = "文件id")
private Long id;
@@ -34,26 +35,4 @@ public class SimulationTaskResultCurveResp {
@Schema(description = "创建时间")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private LocalDateTime createTime;
//格式化后的文件大小
@Schema(description = "格式化后的文件大小")
private String formatFileSize;
@Schema(description = "所属项目")
private String projectName;
@Schema(description = "所属项目id")
private String projectId;
@Schema(description = "所属阶段")
private String phaseName;
@Schema(description = "所属阶段id")
private String phaseId;
@Schema(description = "所属学科")
private String disciplineName;
@Schema(description = "所属学科id")
private String disciplineId;
}

View File

@@ -5,6 +5,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
@Data
@@ -34,6 +35,11 @@ public class FileSearchReq extends BaseReq {
@Schema(description = "父目录ID")
private Long parentDirId;
/**
* 父目录集合下搜索
*/
List<Long> parentDirIds = new ArrayList<>();
/**
* 父节点id
*/

View File

@@ -6,6 +6,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
@Data
@@ -15,6 +16,11 @@ public class QueryBigFileReq extends BaseReq {
*/
private Long dirId;
/**
* 目录ID列表
*/
private List<Long> dirIds = new ArrayList<>();
/**
* 目录类型 DirTypeEnum
*/

View File

@@ -3,23 +3,20 @@ package com.sdm.data.model.resp;
import com.baomidou.mybatisplus.annotation.FieldStrategy;
import com.baomidou.mybatisplus.annotation.TableField;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.sdm.common.entity.resp.BaseResp;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.time.LocalDateTime;
@Data
public class ListBigFileResp {
public class ListBigFileResp extends BaseResp {
@Schema(description = "文件id")
private Long id;
@Schema(description = "文件原始名称")
private String originalName;
//格式化后的文件大小
@Schema(description = "格式化后的文件大小")
private String formatFileSize;
@Schema(description= "approvalStatus")
private String approvalStatus;
@@ -33,36 +30,6 @@ public class ListBigFileResp {
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private LocalDateTime createTime;
@Schema(description = "所属项目")
String ownProjectName;
@Schema(description = "所属项目id")
private String ownProjectId;
@Schema(description = "所属阶段")
String ownPhaseName;
@Schema(description = "所属阶段id")
private String ownPhaseId;
@Schema(description = "所属学科")
String ownDisciplineName;
@Schema(description = "所属学科id")
private String ownDisciplineId;
@Schema(description = "所属工况任务")
String owntaskName;
@Schema(description = "所属工况任务id")
private String owntaskId;
@Schema(description = "所属算列")
String ownRunName;
@Schema(description = "所属算列id")
private String ownRunId;
@Schema(description = "仿真执行人")
String executorName;
}

View File

@@ -4,9 +4,7 @@ import com.alibaba.fastjson2.JSONObject;
import com.github.pagehelper.PageInfo;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.common.ThreadLocalContext;
import com.sdm.common.entity.enums.NodeTypeEnum;
import com.sdm.common.entity.resp.PageDataResp;
import com.sdm.common.utils.FileSizeUtils;
import com.sdm.common.utils.PageUtils;
import com.sdm.data.model.entity.FileMetadataInfo;
import com.sdm.data.model.entity.FileStorage;
@@ -27,7 +25,6 @@ import org.springframework.stereotype.Service;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.InputStreamReader;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
@@ -44,6 +41,9 @@ public class DataAnalysisServiceImpl implements IDataAnalysisService {
@Autowired
private IMinioService MinIOService;
@Autowired
private FileMetadataHierarchyHelper hierarchyHelper;
@Override
public SdmResponse<PageDataResp<List<SimulationTaskResultCurveResp>>> getSimulationTaskFile(GetSimulationTaskFileReq getSimulationTaskFileReq) {
// 1. 构造查询条件
@@ -72,71 +72,15 @@ public class DataAnalysisServiceImpl implements IDataAnalysisService {
return PageUtils.getJsonObjectSdmResponse(new ArrayList<>(), new PageInfo<>());
}
// 3. 获取当前页文件的详细元数据
// 3. 一站式处理:获取文件元数据、构建父目录缓存、转换为响应对象并设置层级信息
List<Long> fileIdList = fileStorages.stream().map(FileStorage::getFileId).toList();
List<FileMetadataInfo> currentFiles = fileMetadataInfoService.lambdaQuery()
.eq(FileMetadataInfo::getTenantId, ThreadLocalContext.getTenantId())
.in(FileMetadataInfo::getId, fileIdList)
.list();
List<SimulationTaskResultCurveResp> finalResultList = hierarchyHelper.processFileHierarchy(
fileIdList,
SimulationTaskResultCurveResp.class,
FileMetadataHierarchyHelper::setFileHierarchy
);
// 批量分层获取所有相关的父目录
// key 是 ID,value 是对应的元数据实体。用于在内存中快速查找。
Map<Long, FileMetadataInfo> parentCacheMap = new HashMap<>();
// 当前需要去数据库查的父级 ID 集合
Set<Long> nextFetchIds = currentFiles.stream()
.map(FileMetadataInfo::getParentId)
.filter(pid -> pid != null && pid != 0)
.collect(Collectors.toSet());
int safetyDepth = 0; // 防死循环计数器
// 只要还有没查过的父 ID且深度在合理范围内10层就继续批量查
while (CollectionUtils.isNotEmpty(nextFetchIds) && safetyDepth < 10) {
// 一次性查出当前这一层所有的父节点信息
List<FileMetadataInfo> parents = fileMetadataInfoService.listByIds(nextFetchIds);
if (CollectionUtils.isEmpty(parents)) break;
nextFetchIds = new HashSet<>(); // 重置,准备收集下一层 ID
for (FileMetadataInfo p : parents) {
parentCacheMap.put(p.getId(), p);
// 如果这个父节点还有上级,且我们之前没查过这个上级,就加进下一次查询列表
if (p.getParentId() != null && p.getParentId() != 0 && !parentCacheMap.containsKey(p.getParentId())) {
nextFetchIds.add(p.getParentId());
}
}
safetyDepth++;
}
// 内存组装数据:将 FileMetadata 转换为 Response并回溯层级信息
List<SimulationTaskResultCurveResp> finalResultList = currentFiles.stream().map(file -> {
SimulationTaskResultCurveResp resp = new SimulationTaskResultCurveResp();
BeanUtils.copyProperties(file, resp);
resp.setFormatFileSize(FileSizeUtils.formatFileSize(BigDecimal.valueOf(file.getFileSize())));
// 从 parentCacheMap 中回溯,设置项目、阶段、专业信息
Long pid = file.getParentId();
int limit = 0;
// 这里的循环完全在内存中进行,速度极快且不产生日志
while (pid != null && parentCacheMap.containsKey(pid) && limit < 15) {
FileMetadataInfo folder = parentCacheMap.get(pid);
String ownType = folder.getRelatedResourceUuidOwnType();
if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) {
resp.setProjectName(folder.getOriginalName());
resp.setProjectId(folder.getRelatedResourceUuid());
} else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) {
resp.setPhaseName(folder.getOriginalName());
resp.setPhaseId(folder.getRelatedResourceUuid());
} else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) {
resp.setDisciplineName(folder.getOriginalName());
resp.setDisciplineId(folder.getRelatedResourceUuid());
}
pid = folder.getParentId();
limit++;
}
return resp;
}).toList();
// 6. 构造分页信息并返回
// 4. 构造分页信息并返回
PageInfo<FileMetadataInfo> pageInfo = new PageInfo<>();
pageInfo.setTotal(pageDataResp.getTotal());
pageInfo.setPageNum(pageDataResp.getCurrentPage());

View File

@@ -7,7 +7,6 @@ import com.github.pagehelper.PageInfo;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.common.ThreadLocalContext;
import com.sdm.common.entity.enums.ApproveFileDataTypeEnum;
import com.sdm.common.entity.enums.NodeTypeEnum;
import com.sdm.common.entity.req.system.UserListReq;
import com.sdm.common.entity.resp.PageDataResp;
import com.sdm.common.entity.resp.system.CIDUserResp;
@@ -65,6 +64,9 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis {
@Autowired
IFileStorageQuotaService fileStorageQuotaService;
@Autowired
FileMetadataHierarchyHelper hierarchyHelper;
public SdmResponse<List<JSONObject>> getNodeSizeByNodeType(String queryNodeType, String queryNodeName, Integer intervalMonths, String targetYm) {
// uuid是项目的uuid
List<FileMetadataInfo> nodeList = fileMetadataInfoService.lambdaQuery()
@@ -248,77 +250,15 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis {
return PageUtils.getJsonObjectSdmResponse(new ArrayList<>(), new PageInfo<>());
}
// 3. 获取当前页文件的详细元数据
// 2. 一站式处理:获取文件元数据、构建父目录缓存、转换为响应对象并设置层级信息
List<Long> fileIdList = fileStorages.stream().map(FileStorage::getFileId).toList();
List<FileMetadataInfo> currentFiles = fileMetadataInfoService.lambdaQuery()
.eq(FileMetadataInfo::getTenantId, ThreadLocalContext.getTenantId())
.in(FileMetadataInfo::getId, fileIdList)
.list();
List<ListBigFileResp> finalResultList = hierarchyHelper.processFileHierarchy(
fileIdList,
ListBigFileResp.class,
FileMetadataHierarchyHelper::setFileHierarchy
);
// 批量分层获取所有相关的父目录
// key 是 ID,value 是对应的元数据实体。用于在内存中快速查找。
Map<Long, FileMetadataInfo> parentCacheMap = new HashMap<>();
// 当前需要去数据库查的父级 ID 集合
Set<Long> nextFetchIds = currentFiles.stream()
.map(FileMetadataInfo::getParentId)
.filter(pid -> pid != null && pid != 0)
.collect(Collectors.toSet());
int safetyDepth = 0; // 防死循环计数器
// 只要还有没查过的父 ID且深度在合理范围内10层就继续批量查
while (org.apache.commons.collections4.CollectionUtils.isNotEmpty(nextFetchIds) && safetyDepth < 10) {
// 一次性查出当前这一层所有的父节点信息
List<FileMetadataInfo> parents = fileMetadataInfoService.listByIds(nextFetchIds);
if (org.apache.commons.collections4.CollectionUtils.isEmpty(parents)) break;
nextFetchIds = new HashSet<>(); // 重置,准备收集下一层 ID
for (FileMetadataInfo p : parents) {
parentCacheMap.put(p.getId(), p);
// 如果这个父节点还有上级,且我们之前没查过这个上级,就加进下一次查询列表
if (p.getParentId() != null && p.getParentId() != 0 && !parentCacheMap.containsKey(p.getParentId())) {
nextFetchIds.add(p.getParentId());
}
}
safetyDepth++;
}
// 内存组装数据:将 FileMetadata 转换为 Response并回溯层级信息
List<ListBigFileResp> finalResultList = currentFiles.stream().map(file -> {
ListBigFileResp resp = new ListBigFileResp();
BeanUtils.copyProperties(file, resp);
resp.setFormatFileSize(FileSizeUtils.formatFileSize(BigDecimal.valueOf(file.getFileSize())));
// 从 parentCacheMap 中回溯,设置项目、阶段、专业信息
Long pid = file.getParentId();
int limit = 0;
// 这里的循环完全在内存中进行,速度极快且不产生日志
while (pid != null && parentCacheMap.containsKey(pid) && limit < 15) {
FileMetadataInfo folder = parentCacheMap.get(pid);
String ownType = folder.getRelatedResourceUuidOwnType();
if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) {
resp.setOwnProjectName(folder.getOriginalName());
resp.setOwnProjectId(folder.getRelatedResourceUuid());
} else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) {
resp.setOwnPhaseName(folder.getOriginalName());
resp.setOwnPhaseId(folder.getRelatedResourceUuid());
} else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) {
resp.setOwnDisciplineName(folder.getOriginalName());
resp.setOwnDisciplineId(folder.getRelatedResourceUuid());
}else if (NodeTypeEnum.TASK.getValue().equals(ownType)) {
resp.setOwntaskName(folder.getOriginalName());
resp.setOwntaskId(folder.getRelatedResourceUuid());
}if (NodeTypeEnum.RUN.getValue().equals(ownType)) {
resp.setOwnRunName(folder.getOriginalName());
resp.setOwnRunId(folder.getRelatedResourceUuid());
}
pid = folder.getParentId();
limit++;
}
return resp;
}).toList();
// 6. 构造分页信息并返回
// 3. 构造分页信息并返回
PageInfo<FileMetadataInfo> pageInfo = new PageInfo<>();
pageInfo.setTotal(pageDataResp.getTotal());
pageInfo.setPageNum(pageDataResp.getCurrentPage());
@@ -339,6 +279,9 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis {
if (queryBigFileReq.getFileSize() != null && queryBigFileReq.getFileSizeUnit() != null) {
fileSizeInBytes = convertToBytes(queryBigFileReq.getFileSize(), queryBigFileReq.getFileSizeUnit());
}
if(ObjectUtils.isNotEmpty((queryBigFileReq.getDirId()))){
queryBigFileReq.getDirIds().add(queryBigFileReq.getDirId());
}
Long tenantId = ThreadLocalContext.getTenantId();
PageHelper.startPage(queryBigFileReq.getCurrent(), queryBigFileReq.getSize());
List<FileStorage> list = fileStorageService.selectBigFiles(queryBigFileReq, fileSizeInBytes, tenantId);

View File

@@ -74,6 +74,9 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
@Autowired
private SysUserFeignClientImpl sysUserFeignClient;
@Autowired
private FileMetadataHierarchyHelper hierarchyHelper;
@Override
@Transactional(rollbackFor = Exception.class)
public SdmResponse saveDimensionTemplateWithHierarchies(TemplateCreationRequest request) {
@@ -291,17 +294,22 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
List<FileMetadataInfo> group = entry.getValue();
if (!group.isEmpty()) {
FileMetadataHierarchyHelper.FileHierarchyResult filesWithParentCache = hierarchyHelper.getFilesWithParentCache(group.stream().map(FileMetadataInfo::getId).toList());
Map<Long, FileMetadataInfo> parentCacheMap = filesWithParentCache.getParentCacheMap();
// 使用第一个作为基础对象
FileMetadataInfo baseInfo = group.get(0);
// 创建包含children的DTO对象
FileMetadataChildrenDTO dto = FileMetadataConvert.INSTANCE.convertToFileMetadataChildrenDTO(baseInfo);
hierarchyHelper.backtrackAndSetHierarchy(baseInfo, parentCacheMap, dto, FileMetadataHierarchyHelper::setFileHierarchy);
// 转换children列表并为每个child设置totalName
List<FileMetadataInfoResp> children = new ArrayList<>();
for (FileMetadataInfo fileInfo : group) {
FileMetadataChildrenDTO childDto = FileMetadataConvert.INSTANCE.convertToFileMetadataChildrenDTO(fileInfo);
hierarchyHelper.backtrackAndSetHierarchy(fileInfo, parentCacheMap, childDto, FileMetadataHierarchyHelper::setFileHierarchy);
// 设置totalName从bucketName中提取
String objectKey = fileInfo.getObjectKey();
String baseDirPath = DirTypeEnum.PROJECT_NODE_DIR.getDirName() + "/";
@@ -319,22 +327,24 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
}
}
// 根据children中的最大创建时间对result进行倒序排序
result.sort((dto1, dto2) -> {
LocalDateTime maxCreateTime1 = dto1.getMergeSameNameChildren().stream()
.map(FileMetadataInfoResp::getCreateTime)
.filter(Objects::nonNull)
.max(LocalDateTime::compareTo)
.orElse(LocalDateTime.MIN);
if(CollectionUtils.isNotEmpty(result)) {
// 根据children中的最大创建时间对result进行倒序排序
result.sort((dto1, dto2) -> {
LocalDateTime maxCreateTime1 = dto1.getMergeSameNameChildren().stream()
.map(FileMetadataInfoResp::getCreateTime)
.filter(Objects::nonNull)
.max(LocalDateTime::compareTo)
.orElse(LocalDateTime.MIN);
LocalDateTime maxCreateTime2 = dto2.getMergeSameNameChildren().stream()
.map(FileMetadataInfoResp::getCreateTime)
.filter(Objects::nonNull)
.max(LocalDateTime::compareTo)
.orElse(LocalDateTime.MIN);
LocalDateTime maxCreateTime2 = dto2.getMergeSameNameChildren().stream()
.map(FileMetadataInfoResp::getCreateTime)
.filter(Objects::nonNull)
.max(LocalDateTime::compareTo)
.orElse(LocalDateTime.MIN);
return maxCreateTime2.compareTo(maxCreateTime1); // 倒序排序
});
return maxCreateTime2.compareTo(maxCreateTime1); // 倒序排序
});
}
return result;
}

View File

@@ -0,0 +1,357 @@
package com.sdm.data.service.impl;
import com.sdm.common.common.ThreadLocalContext;
import com.sdm.common.entity.enums.NodeTypeEnum;
import com.sdm.common.utils.FileSizeUtils;
import com.sdm.data.model.entity.FileMetadataInfo;
import com.sdm.data.service.IFileMetadataInfoService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.math.BigDecimal;
import java.util.*;
import java.util.function.BiConsumer;
import java.util.stream.Collectors;
/**
 * Helper component that resolves the directory hierarchy of file metadata.
 *
 * <p>Given a set of file ids it loads the file rows, batch-fetches every ancestor
 * directory level by level (one query per tree level instead of one per file),
 * and then walks each file's ancestor chain in memory to populate the
 * "owning project / phase / discipline / ..." fields on a response object.
 */
@Slf4j
@Component
public class FileMetadataHierarchyHelper {

    @Autowired
    private IFileMetadataInfoService fileMetadataInfoService;

    /**
     * Default maximum number of parent levels fetched from the database.
     */
    private static final int DEFAULT_MAX_DEPTH = 10;

    /**
     * Safety limit for the in-memory backtracking loop (guards against cyclic parent links).
     */
    private static final int BACKTRACK_LIMIT = 15;

    /**
     * One-stop entry point: load file metadata by id, build the parent-directory
     * cache, convert each file to a response object and set its hierarchy fields.
     *
     * @param fileIdList      file ids to process
     * @param respClass       response type; must have an accessible no-arg constructor
     * @param hierarchySetter strategy that maps one ancestor folder onto the response
     * @param <T>             response type
     * @return response objects; empty list when nothing matches
     */
    public <T> List<T> processFileHierarchy(
            List<Long> fileIdList,
            Class<T> respClass,
            HierarchySetter<T> hierarchySetter) {
        if (CollectionUtils.isEmpty(fileIdList)) {
            return new ArrayList<>();
        }
        // 1. Load the file metadata rows (tenant-scoped).
        List<FileMetadataInfo> currentFiles = getFileMetadataByIds(fileIdList);
        if (CollectionUtils.isEmpty(currentFiles)) {
            return new ArrayList<>();
        }
        // 2. Batch-fetch all ancestor directories.
        Map<Long, FileMetadataInfo> parentCacheMap = buildParentCacheMap(currentFiles);
        // 3. Convert to responses and populate hierarchy info.
        return convertToRespWithHierarchy(currentFiles, parentCacheMap, respClass, hierarchySetter);
    }

    /**
     * Loads the files and builds their parent cache without converting them,
     * for callers that need to do extra work between the two steps.
     *
     * @param fileIdList file ids to process
     * @return the loaded files together with their ancestor cache
     */
    public FileHierarchyResult getFilesWithParentCache(List<Long> fileIdList) {
        if (CollectionUtils.isEmpty(fileIdList)) {
            return new FileHierarchyResult(new ArrayList<>(), new HashMap<>());
        }
        List<FileMetadataInfo> currentFiles = getFileMetadataByIds(fileIdList);
        Map<Long, FileMetadataInfo> parentCacheMap = buildParentCacheMap(currentFiles);
        return new FileHierarchyResult(currentFiles, parentCacheMap);
    }

    /**
     * Value object pairing the loaded files with their ancestor cache.
     */
    public static class FileHierarchyResult {
        private final List<FileMetadataInfo> files;
        private final Map<Long, FileMetadataInfo> parentCacheMap;

        public FileHierarchyResult(List<FileMetadataInfo> files, Map<Long, FileMetadataInfo> parentCacheMap) {
            this.files = files;
            this.parentCacheMap = parentCacheMap;
        }

        public List<FileMetadataInfo> getFiles() {
            return files;
        }

        public Map<Long, FileMetadataInfo> getParentCacheMap() {
            return parentCacheMap;
        }
    }

    /**
     * Loads file metadata for the given ids, scoped to the current tenant.
     *
     * @param fileIdList file ids
     * @return matching metadata rows; empty list for an empty input
     */
    public List<FileMetadataInfo> getFileMetadataByIds(List<Long> fileIdList) {
        if (CollectionUtils.isEmpty(fileIdList)) {
            return new ArrayList<>();
        }
        return fileMetadataInfoService.lambdaQuery()
                .eq(FileMetadataInfo::getTenantId, ThreadLocalContext.getTenantId())
                .in(FileMetadataInfo::getId, fileIdList)
                .list();
    }

    /**
     * Builds the ancestor cache with the default depth limit.
     *
     * @param currentFiles files whose ancestors are needed
     * @return map of directory id to its metadata entity
     */
    public Map<Long, FileMetadataInfo> buildParentCacheMap(List<FileMetadataInfo> currentFiles) {
        return buildParentCacheMap(currentFiles, DEFAULT_MAX_DEPTH);
    }

    /**
     * Batch-fetches all ancestor directories, one query per tree level.
     *
     * @param currentFiles files whose ancestors are needed
     * @param maxDepth     maximum number of levels to fetch (loop guard)
     * @return map of directory id to its metadata entity, for fast in-memory lookup
     */
    public Map<Long, FileMetadataInfo> buildParentCacheMap(List<FileMetadataInfo> currentFiles, int maxDepth) {
        Map<Long, FileMetadataInfo> parentCacheMap = new HashMap<>();
        if (CollectionUtils.isEmpty(currentFiles)) {
            return parentCacheMap;
        }
        // Parent ids of the current level that still need to be fetched.
        Set<Long> nextFetchIds = currentFiles.stream()
                .map(FileMetadataInfo::getParentId)
                .filter(pid -> pid != null && pid != 0)
                .collect(Collectors.toSet());
        int safetyDepth = 0; // guards against cyclic parent links
        while (CollectionUtils.isNotEmpty(nextFetchIds) && safetyDepth < maxDepth) {
            // Fetch the whole level in a single query.
            List<FileMetadataInfo> parents = fileMetadataInfoService.listByIds(nextFetchIds);
            if (CollectionUtils.isEmpty(parents)) {
                break;
            }
            nextFetchIds = new HashSet<>(); // collect the next level's ids
            for (FileMetadataInfo p : parents) {
                parentCacheMap.put(p.getId(), p);
                // Queue the grandparent unless it was already fetched.
                if (p.getParentId() != null && p.getParentId() != 0 && !parentCacheMap.containsKey(p.getParentId())) {
                    nextFetchIds.add(p.getParentId());
                }
            }
            safetyDepth++;
        }
        return parentCacheMap;
    }

    /**
     * Converts each file to a response object: copies matching bean properties,
     * formats the file size (when the response exposes a setter for it) and
     * populates the hierarchy fields by backtracking through the cache.
     *
     * @throws RuntimeException when the response object cannot be instantiated or populated
     */
    public <T> List<T> convertToRespWithHierarchy(
            List<FileMetadataInfo> currentFiles,
            Map<Long, FileMetadataInfo> parentCacheMap,
            Class<T> respClass,
            HierarchySetter<T> hierarchySetter) {
        if (CollectionUtils.isEmpty(currentFiles)) {
            return new ArrayList<>();
        }
        return currentFiles.stream().map(file -> {
            try {
                T resp = respClass.getDeclaredConstructor().newInstance();
                BeanUtils.copyProperties(file, resp);
                // Only format when the size is known; the setter lookup tolerates
                // response types without a formatFileSize property.
                if (file.getFileSize() != null) {
                    setFormatFileSize(resp, file.getFileSize());
                }
                // Walk the cached ancestor chain and set the hierarchy fields.
                backtrackAndSetHierarchy(file, parentCacheMap, resp, hierarchySetter);
                return resp;
            } catch (Exception e) {
                log.error("转换文件元数据到响应对象失败", e);
                throw new RuntimeException("转换失败", e);
            }
        }).toList();
    }

    /**
     * Walks up the cached ancestor chain of {@code file} and lets the setter
     * record each level on the response. Pure in-memory loop — no I/O, no logging.
     */
    public <T> void backtrackAndSetHierarchy(
            FileMetadataInfo file,
            Map<Long, FileMetadataInfo> parentCacheMap,
            T resp,
            HierarchySetter<T> hierarchySetter) {
        Long pid = file.getParentId();
        int limit = 0;
        while (pid != null && parentCacheMap.containsKey(pid) && limit < BACKTRACK_LIMIT) {
            FileMetadataInfo folder = parentCacheMap.get(pid);
            String ownType = folder.getRelatedResourceUuidOwnType();
            hierarchySetter.setHierarchyInfo(resp, folder, ownType);
            pid = folder.getParentId();
            limit++;
        }
    }

    /**
     * Reflectively sets the formatted file size. Silently skipped when the
     * response type has no {@code setFormatFileSize(String)} method.
     */
    private <T> void setFormatFileSize(T resp, Long fileSize) {
        try {
            String formatFileSize = FileSizeUtils.formatFileSize(BigDecimal.valueOf(fileSize));
            resp.getClass().getMethod("setFormatFileSize", String.class).invoke(resp, formatFileSize);
        } catch (NoSuchMethodException e) {
            // The response type does not expose this property — nothing to do.
        } catch (Exception e) {
            log.warn("设置格式化文件大小失败", e);
        }
    }

    /**
     * Strategy that maps one ancestor folder onto the response object.
     *
     * @param <T> response type
     */
    @FunctionalInterface
    public interface HierarchySetter<T> {
        /**
         * @param resp    response object being populated
         * @param folder  ancestor folder metadata
         * @param ownType node type of the folder (a NodeTypeEnum value)
         */
        void setHierarchyInfo(T resp, FileMetadataInfo folder, String ownType);
    }

    /**
     * Reflectively invokes a name/id setter pair on the response. Exceptions
     * (including NoSuchMethodException) propagate so each predefined setter
     * can log with its own context, matching the original per-method handling.
     */
    private static void applyNameAndId(Object resp, String nameSetter, String idSetter, FileMetadataInfo folder) throws Exception {
        resp.getClass().getMethod(nameSetter, String.class).invoke(resp, folder.getOriginalName());
        resp.getClass().getMethod(idSetter, String.class).invoke(resp, folder.getRelatedResourceUuid());
    }

    // ================ Predefined hierarchy setters ================

    /**
     * Setter for SimulationTaskResultCurveResp-style responses:
     * projectName/projectId, phaseName/phaseId, disciplineName/disciplineId.
     */
    public static <T> void setSimulationTaskHierarchy(T resp, FileMetadataInfo folder, String ownType) {
        try {
            if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) {
                applyNameAndId(resp, "setProjectName", "setProjectId", folder);
            } else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) {
                applyNameAndId(resp, "setPhaseName", "setPhaseId", folder);
            } else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) {
                applyNameAndId(resp, "setDisciplineName", "setDisciplineId", folder);
            }
        } catch (Exception e) {
            log.warn("设置SimulationTask层级信息失败", e);
        }
    }

    /**
     * Setter for ListBigFileResp-style responses ("own"-prefixed properties:
     * ownProjectName/ownProjectId, ownPhaseName/ownPhaseId,
     * ownDisciplineName/ownDisciplineId, owntaskName/owntaskId, ownRunName/ownRunId).
     */
    public static <T> void setListBigFileHierarchy(T resp, FileMetadataInfo folder, String ownType) {
        try {
            if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnProjectName", "setOwnProjectId", folder);
            } else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnPhaseName", "setOwnPhaseId", folder);
            } else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnDisciplineName", "setOwnDisciplineId", folder);
            } else if (NodeTypeEnum.TASK.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwntaskName", "setOwntaskId", folder);
            } else if (NodeTypeEnum.RUN.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnRunName", "setOwnRunId", folder);
            }
        } catch (Exception e) {
            log.warn("设置ListBigFile层级信息失败", e);
        }
    }

    /**
     * Generic setter covering every node level declared on BaseResp
     * (project, phase, discipline, machine, workspace, task, run).
     */
    public static <T> void setFileHierarchy(T resp, FileMetadataInfo folder, String ownType) {
        try {
            if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnProjectName", "setOwnProjectId", folder);
            } else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnPhaseName", "setOwnPhaseId", folder);
            } else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnDisciplineName", "setOwnDisciplineId", folder);
            } else if (NodeTypeEnum.MACHINE.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnMachineName", "setOwnMachineId", folder);
            } else if (NodeTypeEnum.WORKSPACE.getValue().equals(ownType)) {
                // was a dangling "}if (...)" that re-tested WORKSPACE after every
                // branch; equivalent for a single ownType, now a proper else-if
                applyNameAndId(resp, "setOwnWorkspaceName", "setOwnWorkspaceId", folder);
            } else if (NodeTypeEnum.TASK.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwntaskName", "setOwntaskId", folder);
            } else if (NodeTypeEnum.RUN.getValue().equals(ownType)) {
                applyNameAndId(resp, "setOwnRunName", "setOwnRunId", folder);
            }
        } catch (Exception e) {
            // fixed copy-pasted log message that previously said "ListBigFile"
            log.warn("设置File层级信息失败", e);
        }
    }
}

View File

@@ -147,6 +147,9 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
@Autowired
private FileApproveExecutor fileApproveExecutor;
@Autowired
private FileMetadataHierarchyHelper hierarchyHelper;
@Autowired
@Qualifier(value = "nonSensitiveTaskPool")
private Executor nonSensitiveTaskPool;
@@ -599,20 +602,30 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
@Override
public SdmResponse fileSearch(FileSearchReq minioFileSearchReq) {
if(ObjectUtils.isNotEmpty(minioFileSearchReq.getParentDirId())){
minioFileSearchReq.getParentDirIds().add(minioFileSearchReq.getParentDirId());
}
QueryBigFileReq queryBigFileReq = new QueryBigFileReq();
Long dirId;
List<Long> dirIds = new ArrayList<>();
Integer dirType;
if (ObjectUtils.isNotEmpty(minioFileSearchReq.getParentUuid())) {
// 项目节点下搜索文件
FileMetadataInfo fileMetadataInfo = fileMetadataInfoService.lambdaQuery().eq(FileMetadataInfo::getRelatedResourceUuid, minioFileSearchReq.getParentUuid()).one();
dirId = fileMetadataInfo.getId();
if(ObjectUtils.isEmpty(fileMetadataInfo)){
return SdmResponse.success();
}
dirIds.add(fileMetadataInfo.getId());
dirType = fileMetadataInfo.getDirType();
} else if (ObjectUtils.isNotEmpty(minioFileSearchReq.getParentDirId())) {
} else if (ObjectUtils.isNotEmpty(minioFileSearchReq.getParentDirIds())) {
// 知识库的文件查询
FileMetadataInfo fileMetadataInfo = fileMetadataInfoService.getById(minioFileSearchReq.getParentDirId());
dirId = fileMetadataInfo.getId();
dirType = fileMetadataInfo.getDirType();
List<FileMetadataInfo> fileMetadataInfos = fileMetadataInfoService.listByIds(minioFileSearchReq.getParentDirIds());
if(ObjectUtils.isEmpty(fileMetadataInfos)){
return SdmResponse.success();
}
dirIds.addAll(fileMetadataInfos.stream().map(FileMetadataInfo::getId).toList());
dirType = fileMetadataInfos.get(0).getDirType();
} else if (ObjectUtils.isNotEmpty(minioFileSearchReq.getDirType())) {
dirType = minioFileSearchReq.getDirType();
DirTypeEnum dirTypeByValue = DirTypeEnum.getDirTypeByValue(dirType);
@@ -629,7 +642,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
}
// 获取根目录的 id
dirId = fileMetadataInfoByObjectKey.get().getId();
dirIds.add(fileMetadataInfoByObjectKey.get().getId());
}else {
return SdmResponse.failed("请选择目录类型:1 知识库文件夹2 项目节点文件夹3 头像库文件夹4 仿真参数库文件夹,5 训练模型文件夹");
}
@@ -638,7 +651,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
queryBigFileReq.setIsLatest(true);
queryBigFileReq.setCurrent(minioFileSearchReq.getCurrent());
queryBigFileReq.setSize(minioFileSearchReq.getSize());
queryBigFileReq.setDirId(dirId);
queryBigFileReq.setDirIds(dirIds);
if (Objects.equals(DirTypeEnum.KNOWLEDGE_BASE_DIR.getValue(), dirType)) {
// 知识库文件:排除新增在审批的文件
queryBigFileReq.setApproveTypeList(fileDatdList);
@@ -654,30 +667,30 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
SdmResponse<PageDataResp<List<FileStorage>>> searchResult = dataStorageAnalysis.listBigFile(queryBigFileReq);
List<Long> fileIdList =searchResult.getData().getData().stream().map(FileStorage::getFileId).collect(Collectors.toList());
List<Long> fileIdList = searchResult.getData().getData().stream().map(FileStorage::getFileId).collect(Collectors.toList());
if(CollectionUtils.isEmpty(fileIdList)){
return SdmResponse.success();
}
List<FileMetadataInfo> files = fileMetadataInfoService
.lambdaQuery()
.in(FileMetadataInfo::getId, fileIdList)
.list();
// 使用 hierarchyHelper 获取文件元数据和父目录缓存
FileMetadataHierarchyHelper.FileHierarchyResult hierarchyResult = hierarchyHelper.getFilesWithParentCache(fileIdList);
List<FileMetadataInfo> files = hierarchyResult.getFiles();
Map<Long, FileMetadataInfo> parentCacheMap = hierarchyResult.getParentCacheMap();
setCreatorNames(files);
setCidInfos(files);
setProjectName(files);
setAnalysisDirectionName(files);
setSimulationPoolAndTaskInfo(files);
List<FileMetadataInfoResp> dtoList = files.stream().map(entity -> {
FileMetadataInfoResp dto = new FileMetadataInfoResp();
BeanUtils.copyProperties(entity, dto);
//计算当前用户对该文件的综合权限位
// 对于列表查询,如果层级很深,频繁递归会有性能问题。
dto.setPermissionValue(fileUserPermissionService.getMergedPermission(entity.getId(), ThreadLocalContext.getUserId()));
return dto;
}).collect(Collectors.toList());
List<FileMetadataInfoResp> dtoList = hierarchyHelper.convertToRespWithHierarchy(files, parentCacheMap, FileMetadataInfoResp.class, FileMetadataHierarchyHelper::setFileHierarchy)
.stream().map(dto -> {
//计算当前用户对该文件的综合权限位
// 对于列表查询,如果层级很深,频繁递归会有性能问题。
dto.setPermissionValue(fileUserPermissionService.getMergedPermission(dto.getId(), ThreadLocalContext.getUserId()));
return dto;
}).collect(Collectors.toList());
PageDataResp<List<FileStorage>> pageDataResp = searchResult.getData();
PageInfo page = new PageInfo();
page.setPageNum(pageDataResp.getCurrentPage());

View File

@@ -171,9 +171,14 @@
#{approveType}
</foreach>
</if>
<if test="queryBigFileReq.dirId != null">
AND file_storage.dirId = #{queryBigFileReq.dirId}
<if test="queryBigFileReq.dirIds != null and queryBigFileReq.dirIds.size()>0">
AND file_storage.dirId IN
<foreach collection="queryBigFileReq.dirIds" item="dirId" open="(" separator="," close=")">
#{dirId}
</foreach>
</if>
<if test="queryBigFileReq.fileSuffix != null and queryBigFileReq.fileSuffix != ''">
AND file_storage.fileSuffix = #{queryBigFileReq.fileSuffix}
</if>

5293
flowable/repomix-output.xml Normal file

File diff suppressed because it is too large Load Diff