fix:文件打标签重构

This commit is contained in:
2026-02-03 19:08:31 +08:00
parent 0bb0eaf837
commit 92537d8ada
29 changed files with 881 additions and 86 deletions

View File

@@ -50,6 +50,12 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${lombok.version}</version>
<optional>true</optional>
</dependency>
<!-- FTP -->
<dependency>
<groupId>commons-net</groupId>
@@ -190,12 +196,6 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</exclude>
</excludes>
<includeSystemScope>true</includeSystemScope>
</configuration>
</plugin>

View File

@@ -1,5 +1,6 @@
package com.sdm.data.controller;
import com.sdm.common.annotation.AutoFillDictTags;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.entity.req.data.GetSimulationTaskFileReq;
import com.sdm.common.entity.req.export.FileAnalysisExportExcelFormat;
@@ -33,6 +34,7 @@ public class DataAnalysisController implements IDataAnalysisFeignClient {
* 数据分析(数据查询)-获取 simulationTask的文件
*
*/
@AutoFillDictTags
@Operation(summary = "获取 simulationtTask的文件,文件类型: 1模型文件 2仿真报告 3计算文件 4曲线文件 5云图文件")
@PostMapping("/getSimulationTaskFile")
public SdmResponse<PageDataResp<List<SimulationTaskResultCurveResp>>> getSimulationTaskFile(@RequestBody @Validated GetSimulationTaskFileReq getSimulationTaskFileReq) {
@@ -50,6 +52,7 @@ public class DataAnalysisController implements IDataAnalysisFeignClient {
return dataAnalysisService.getCSVData(fileId);
}
@AutoFillDictTags
@PostMapping(value = "/exportTaskFileByScript")
@ResponseBody
SdmResponse exportTaskFileByScript(HttpServletResponse response , @RequestBody FileAnalysisExportExcelFormat req) {

View File

@@ -1,5 +1,6 @@
package com.sdm.data.controller;
import com.sdm.common.annotation.AutoFillDictTags;
import com.sdm.common.annotation.IgnoreAuth;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.entity.enums.DirTypeEnum;
@@ -138,6 +139,7 @@ public class DataFileController implements IDataFeignClient {
* @param req
* @return
*/
@AutoFillDictTags
@PostMapping("/fileSearch")
@Operation(summary = "搜索文件", description = "根据搜索条件查找匹配的文件")
public SdmResponse fileSearch(@RequestBody @Validated FileSearchReq req) {
@@ -349,6 +351,7 @@ public class DataFileController implements IDataFeignClient {
* @param req
* @return
*/
@AutoFillDictTags
@SysLog("上传文件")
@PostMapping(value = "/uploadFiles", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@Operation(
@@ -494,6 +497,7 @@ public class DataFileController implements IDataFeignClient {
/**
* 文件信息入库准备发起评审
*/
@AutoFillDictTags
@SysLog("新增文件")
@PostMapping("/batchAddFileInfo")
@Operation(summary = "文件信息入库准备发起评审")

View File

@@ -1,6 +1,7 @@
package com.sdm.data.controller;
import com.sdm.common.annotation.AutoFillDictTags;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.entity.req.data.CreateDirReq;
import com.sdm.common.entity.req.data.UploadFilesReq;
@@ -86,6 +87,7 @@ public class DataOverviewController {
/**
* 搜索项目节点下的文件
*/
@AutoFillDictTags
@PostMapping("/searchSimulationNodeFiles")
@Operation(summary = "搜索项目节点下的文件")
public SdmResponse searchSimulationNodeFiles(@RequestBody FileSearchReq req) {

View File

@@ -3,6 +3,7 @@ package com.sdm.data.controller;
import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.ObjectUtils;
import com.sdm.common.annotation.AutoFillDictTags;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.common.ThreadLocalContext;
import com.sdm.common.entity.req.project.GetAllTasksByDisciplineReq;
@@ -119,6 +120,7 @@ public class DataStorageAnalysisController {
}
// 存储系统大文件筛选
@AutoFillDictTags
@PostMapping("/listBigFile")
@Operation(summary = "存储系统大文件筛选")
public SdmResponse<PageDataResp<List<ListBigFileResp>>> listBigFile(@RequestBody QueryBigFileReq queryBigFileReq){

View File

@@ -1,12 +1,15 @@
package com.sdm.data.model.req;
import com.alibaba.fastjson2.annotation.JSONField;
import com.sdm.common.entity.req.data.BaseReq;
import com.sdm.common.entity.req.system.DictTagReq;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Data
@Schema(description = "文件搜索请求参数")
@@ -86,6 +89,34 @@ public class FileSearchReq extends BaseReq {
@Schema(description = "文件业务类型1模型文件 2仿真报告、3计算文件、4曲线文件、5云图文件6网格文件7计算过程文件",enumAsRef = true)
private List<Integer> fileBizType;
// ----------------------------------------------------------------
// 很重要用于设置标签通过autoFillDictTags切面 设置dictTagIdsCache
/**
* 字典标签查询结果缓存
* key: dictClass字典类
* value: Map<dictValue, dictId>(字典值 -> 字典ID
* 此字段由服务层填充,前端不需传入
*/
@Schema(description = "字典标签查询结果缓存", hidden = true)
@JSONField(serialize = false)
private Map<String, Map<String, Integer>> dictTagIdsCache;
@Schema(description = "字典标签查询列表,格式:['fileTypeDictClass','fileTypeDictValue','disciplineTypeDictClass','disciplineDictValue']")
private List<String> dictTags;
@Schema(description = "文件类型字典类")
private String fileTypeDictClass;
@Schema(description = "文件类型字典值")
private String fileTypeDictValue;
@Schema(description = "学科类型字典类")
private String disciplineTypeDictClass;
@Schema(description = "学科类型字典值")
private String disciplineDictValue;
// ----------------------------------------------------------------
/**
* 是否过滤空数据
*/

View File

@@ -1,13 +1,16 @@
package com.sdm.data.model.req;
import com.alibaba.fastjson2.annotation.JSONField;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.sdm.common.entity.req.data.BaseReq;
import com.sdm.common.entity.req.system.DictTagReq;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Data
public class QueryBigFileReq extends BaseReq {
@@ -64,13 +67,49 @@ public class QueryBigFileReq extends BaseReq {
@Schema(description = "文件业务类型1模型文件 2仿真报告、3计算文件、4曲线文件、5云图文件6网格文件7计算过程文件")
private List<Integer> fileBizType;
@Schema(description = "上传人id")
private List<Long> uploadUserId;
// ----------------------------------------------------------------
// 很重要用于设置标签通过autoFillDictTags切面 设置dictTagIdsCache
/**
* 字典标签查询结果缓存
* key: dictClass字典类
* value: Map<dictValue, dictId>(字典值 -> 字典ID
* 此字段由服务层填充,前端不需传入
*/
@Schema(description = "字典标签查询结果缓存", hidden = true)
@JSONField(serialize = false)
private Map<String, Map<String, Integer>> dictTagIdsCache;
@Schema(description = "字典标签查询列表,格式:['fileTypeDictClass','fileTypeDictValue','disciplineTypeDictClass','disciplineDictValue']")
private List<String> dictTags;
@Schema(description = "文件类型字典类")
private String fileTypeDictClass;
@Schema(description = "文件类型字典值")
private String fileTypeDictValue;
@Schema(description = "学科类型字典类")
private String disciplineTypeDictClass;
@Schema(description = "学科类型字典值")
private String disciplineDictValue;
// ----------------------------------------------------------------
@Schema(description = "上传人 id")
private List<Long> uploadUserId;
@Schema(description = "审批类型")
private List<Integer> approveTypeList;
@Schema(description = "是否最新")
Boolean isLatest=true;
/**
* 通过标签查询到的文件ID列表
* 此字段由服务层填充,用于标签筛选
*/
@Schema(description = "通过标签查询到的文件ID列表", hidden = true)
@JSONField(serialize = false)
private List<Long> fileIds;
}

View File

@@ -65,7 +65,6 @@ public class DataAnalysisServiceImpl implements IDataAnalysisService {
// 1. 构造查询条件
QueryBigFileReq queryBigFileReq = new QueryBigFileReq();
BeanUtils.copyProperties(getSimulationTaskFileReq, queryBigFileReq);
queryBigFileReq.setFileBizType(List.of(getSimulationTaskFileReq.getFileBizType()));
// 获取特定 UUID 对应的目录 ID
if (ObjectUtils.isNotEmpty(getSimulationTaskFileReq.getUuid())) {

View File

@@ -7,10 +7,13 @@ import com.github.pagehelper.PageInfo;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.common.ThreadLocalContext;
import com.sdm.common.entity.enums.ApproveFileDataTypeEnum;
import com.sdm.common.entity.req.system.DictTagReq;
import com.sdm.common.entity.req.system.UserListReq;
import com.sdm.common.entity.resp.PageDataResp;
import com.sdm.common.entity.resp.system.CIDUserResp;
import com.sdm.common.feign.impl.system.SysConfigFeignClientImpl;
import com.sdm.common.feign.impl.system.SysUserFeignClientImpl;
import com.sdm.common.feign.inter.system.ISysConfigFeignClient;
import com.sdm.common.service.UserNameCacheService;
import com.sdm.common.utils.FileSizeUtils;
import com.sdm.common.utils.PageUtils;
@@ -19,6 +22,7 @@ import com.sdm.data.model.dto.UserTotalFileSizeDTO;
import com.sdm.data.model.entity.FileMetadataInfo;
import com.sdm.data.model.entity.FileStorage;
import com.sdm.data.model.entity.FileStorageQuota;
import com.sdm.data.model.entity.FileTagRel;
import com.sdm.data.model.enums.FileStorageQuotaStatusEnum;
import com.sdm.data.model.req.AddUserQuotaEntity;
import com.sdm.common.entity.req.data.DelFileReq;
@@ -54,6 +58,9 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis {
@Autowired
SysUserFeignClientImpl sysUserFeignClient;
@Autowired
ISysConfigFeignClient sysConfigFeignClient;
@Autowired
UserNameCacheService userNameCacheService;
@@ -67,6 +74,9 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis {
@Autowired
FileMetadataHierarchyHelper hierarchyHelper;
@Autowired
IFileTagRelService fileTagRelService;
/**
* 根据节点类型获取存储空间占用(支持批量查询)
*/
@@ -456,6 +466,15 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis {
if(ObjectUtils.isNotEmpty((queryBigFileReq.getDirId()))){
queryBigFileReq.getDirIds().add(queryBigFileReq.getDirId());
}
// 处理标签查询使用AOP自动填充的dictTagIdsCache
if (queryBigFileReq.getDictTags() != null && !queryBigFileReq.getDictTags().isEmpty()) {
List<Long> fileIds = extractFileIdsByTags(queryBigFileReq);
if (fileIds != null) {
queryBigFileReq.setFileIds(fileIds);
}
}
Long tenantId = ThreadLocalContext.getTenantId();
PageHelper.startPage(queryBigFileReq.getCurrent(), queryBigFileReq.getSize());
List<FileStorage> list = fileStorageService.selectBigFiles(queryBigFileReq, fileSizeInBytes, tenantId);
@@ -490,6 +509,47 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis {
}
}
/**
 * Extracts the list of file IDs that match the dictionary-tag filter.
 * This method reads the dictTagIdsCache that the AutoFillDictTags AOP aspect
 * has already populated on the request, so no additional dictionary query is made.
 *
 * @param queryBigFileReq query request whose dictTagIdsCache was filled by the aspect
 * @return matching file IDs, or null when the cache is empty or no tag IDs were collected
 */
private List<Long> extractFileIdsByTags(QueryBigFileReq queryBigFileReq) {
// Use the dictTagIdsCache auto-filled by the AOP aspect
Map<String, Map<String, Integer>> dictIdMap = queryBigFileReq.getDictTagIdsCache();
if (dictIdMap == null || dictIdMap.isEmpty()) {
log.warn("Dict tags cache is empty, cannot extract file ids by tags");
return null;
}
// Collect every dictionary ID across all dict classes in the cache
Set<Integer> tagIds = new HashSet<>();
for (Map<String, Integer> valueMap : dictIdMap.values()) {
tagIds.addAll(valueMap.values());
}
if (CollectionUtils.isEmpty(tagIds)) {
return null;
}
Long tenantId = ThreadLocalContext.getTenantId();
// null dirIds disables the directory restriction via the conditional in() below
List<Long> dirIds = CollectionUtils.isEmpty(queryBigFileReq.getDirIds()) ? null : queryBigFileReq.getDirIds();
// Query file IDs linked to any of the collected tag IDs for this tenant.
// NOTE(review): when tags match no rows this returns an EMPTY list; the caller
// passes it to the mapper whose <if> guard (`fileIds.size()>0`) then skips the
// IN clause entirely, silently disabling the tag filter instead of returning
// zero results — confirm this is the intended behavior.
return fileTagRelService.lambdaQuery()
.eq(FileTagRel::getTenantId, tenantId)
.in(dirIds != null, FileTagRel::getDirId, dirIds)
.in(FileTagRel::getTagId, tagIds)
.select(FileTagRel::getFileId)
.list()
.stream()
.map(FileTagRel::getFileId)
.distinct()
.collect(Collectors.toList());
}
@Override
public SdmResponse batchDeleteBigFile(List<Long> fileIds) {
if (CollectionUtils.isEmpty(fileIds)) {

View File

@@ -76,6 +76,7 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
@@ -166,6 +167,9 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
@Autowired
private LocalFileService localFileService;
@Autowired
private DictTagHelper dictTagHelper;
@@ -374,7 +378,6 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
*
* @param filePath 文件路径
* @param fileName 文件名
* @param fileType 文件类型
* @param projectId 项目ID
* @param analysisDirectionId 分析方向ID
* @param remarks 备注信息
@@ -382,8 +385,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
* @param fileSize 文件大小
* @return 创建后的文件元数据信息
*/
private FileMetadataInfo createFileMetadata(String filePath, String fileName, Integer fileType
, String projectId, String analysisDirectionId, String remarks, Long parentId, Long fileSize
private FileMetadataInfo createFileMetadata(String filePath, String fileName, String projectId, String analysisDirectionId, String remarks, Long parentId, Long fileSize
) {
FileMetadataInfo fileInfo = new FileMetadataInfo();
fileInfo.setObjectKey(filePath);
@@ -393,7 +395,6 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
fileInfo.setAnalysisDirectionId(analysisDirectionId);
fileInfo.setRemarks(remarks);
fileInfo.setDataType(DataTypeEnum.FILE.getValue());
fileInfo.setFileType(fileType);
fileInfo.setParentId(parentId);
fileInfo.setIsRoot(false);
fileInfo.setCreatorId(ThreadLocalContext.getUserId());
@@ -1442,7 +1443,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
try {
minioService.copyFile(oldDirMinioObjectKey, newDirMinioObjectKey,sourceMetadataInfo.getBucketName());
// 创建目录元数据并保存到数据库
FileMetadataInfo fileInfo = createFileMetadata(newDirMinioObjectKey, sourceMetadataInfo.getOriginalName(), sourceMetadataInfo.getFileType(),
FileMetadataInfo fileInfo = createFileMetadata(newDirMinioObjectKey, sourceMetadataInfo.getOriginalName(),
null, null, null, targetParentMetadataInfo.getId(), sourceMetadataInfo.getFileSize());
fileMetadataInfoService.save(fileInfo);
return SdmResponse.success("复制文件成功");
@@ -1787,7 +1788,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
try {
minioService.uploadFile(req.getFile(), fileMinioObjectKey, null, dirMetadataInfo.getBucketName());
FileMetadataInfo fileInfo = createFileMetadata(fileMinioObjectKey, originalName, req.getFileType(),
FileMetadataInfo fileInfo = createFileMetadata(fileMinioObjectKey, originalName,
req.getProjectId(), req.getAnalysisDirectionId(), req.getRemarks(), dirMetadataInfo.getId(), req.getFile().getSize()
);
fileMetadataInfoService.save(fileInfo);
@@ -1826,7 +1827,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
}
try {
FileMetadataInfo fileInfo = createFileMetadata(fileMinioObjectKey, fileReq.getFileName(), fileReq.getFileType(),
FileMetadataInfo fileInfo = createFileMetadata(fileMinioObjectKey, fileReq.getFileName(),
req.getProjectId(), req.getAnalysisDirectionId(), req.getRemarks(), dirMetadataInfo.getId(), fileReq.getSize());
fileInfo.setUploadTaskId(req.getUploadTaskId());
fileInfo.setTemplateId(req.getTemplateId());
@@ -1972,8 +1973,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
private void saveFileTags(UploadFilesReq req, FileMetadataInfo fileInfo, FileMetadataInfo dirMetadataInfo,
List<Long> ancestorDirIds) {
List<DictTagReq> tags = req.getTags();
if (CollectionUtils.isEmpty(tags)) {
if (CollectionUtils.isEmpty(req.getDictTags())) {
return;
}
@@ -1981,49 +1981,52 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
Long creatorId = ThreadLocalContext.getUserId();
long fileSize = resolveFileSize(req);
DictTagReq.BatchDictIdQueryReq batchReq = new DictTagReq.BatchDictIdQueryReq();
batchReq.setItems(tags);
SdmResponse<Map<String, Map<String, Integer>>> response = sysConfigFeignClient.batchQueryDictionaryIds(batchReq);
if (!response.isSuccess() || response.getData() == null) {
log.warn("Failed to query dictionary ids for tags");
return;
// 从缓存获取字典标签ID已由AOP切面自动填充
Map<String, Map<String, Integer>> dictIdMap = req.getDictTagIdsCache();
if (dictIdMap == null || dictIdMap.isEmpty()) {
// 如果缓存为空,尝试手动查询(兼容性处理)
log.warn("Dict tags cache is empty, trying to query manually");
dictIdMap = dictTagHelper.queryAndCacheDictTagIds(req);
if (dictIdMap.isEmpty()) {
log.warn("No dictionary ids found for tags");
return;
}
}
Map<String, Map<String, Integer>> dictIdMap = response.getData();
List<FileTagRel> directRelList = new ArrayList<>();
List<FileTagRel> derivedRelList = new ArrayList<>();
for (DictTagReq tag : tags) {
Map<String, Integer> valueMap = dictIdMap.get(tag.getDictName());
if (valueMap == null || valueMap.get(tag.getDictValue()) == null) {
log.warn("Dictionary not found for dictName: {}, dictValue: {}", tag.getDictName(), tag.getDictValue());
continue;
}
// 遍历查询结果,构造文件标签关系
for (Map.Entry<String, Map<String, Integer>> classEntry : dictIdMap.entrySet()) {
Map<String, Integer> valueMap = classEntry.getValue();
// 遍历该dictClass下的所有dictValue
for (Integer dictId : valueMap.values()) {
if (dictId == null) {
continue;
}
Integer dictId = valueMap.get(tag.getDictValue());
if (dictId == null) {
continue;
}
// 创建当前目录的直接关联
FileTagRel directRel = new FileTagRel();
directRel.setFileId(fileInfo.getId());
directRel.setTagId(dictId);
directRel.setDirId(dirMetadataInfo.getId());
directRel.setTenantId(tenantId);
directRel.setCreatorId(creatorId);
directRel.setFileSize(fileSize);
directRelList.add(directRel);
FileTagRel directRel = new FileTagRel();
directRel.setFileId(fileInfo.getId());
directRel.setTagId(dictId);
directRel.setDirId(dirMetadataInfo.getId());
directRel.setTenantId(tenantId);
directRel.setCreatorId(creatorId);
directRel.setFileSize(fileSize);
directRelList.add(directRel);
for (Long dirIdItem : ancestorDirIds) {
FileTagRel derivedRel = new FileTagRel();
derivedRel.setFileId(fileInfo.getId());
derivedRel.setTagId(dictId);
derivedRel.setDirId(dirIdItem);
derivedRel.setTenantId(tenantId);
derivedRel.setCreatorId(creatorId);
derivedRel.setFileSize(fileSize);
derivedRelList.add(derivedRel);
// 创建祖先目录的派生关联
for (Long dirIdItem : ancestorDirIds) {
FileTagRel derivedRel = new FileTagRel();
derivedRel.setFileId(fileInfo.getId());
derivedRel.setTagId(dictId);
derivedRel.setDirId(dirIdItem);
derivedRel.setTenantId(tenantId);
derivedRel.setCreatorId(creatorId);
derivedRel.setFileSize(fileSize);
derivedRelList.add(derivedRel);
}
}
}
@@ -2315,7 +2318,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
minioService.uploadFile(req.getFile(), newFileMinioObjectKey, null,oldFileMetadataInfo.getBucketName());
// 创建目录元数据并保存到数据库
FileMetadataInfo newFileInfo = createFileMetadata(newFileMinioObjectKey, req.getFileName(), req.getFileType(),
FileMetadataInfo newFileInfo = createFileMetadata(newFileMinioObjectKey, req.getFileName(),
req.getProjectId(), req.getAnalysisDirectionId(), req.getRemarks(), oldFileMetadataInfo.getParentId(), req.getFile().getSize()
);
newFileInfo.setFileGroupId(fileGroupId);
@@ -2470,7 +2473,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
minioService.uploadFile(avatar, avatarMinioObjectKey, null,avatarDirMetadataInfo.getBucketName());
// 创建目录元数据并保存到数据库
FileMetadataInfo fileInfo = createFileMetadata(avatarMinioObjectKey, newFilename, null,
FileMetadataInfo fileInfo = createFileMetadata(avatarMinioObjectKey, newFilename,
null, null, null, parAvatarDirId, avatar.getSize());
fileMetadataInfoService.save(fileInfo);
JSONObject jsonObject = new JSONObject();
@@ -2531,7 +2534,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
minioService.uploadFile(paramFile, simulationParamMinioObjectKey, null,simulationParamDirMetadataInfo.getBucketName());
// 创建目录元数据并保存到数据库
FileMetadataInfo fileInfo = createFileMetadata(simulationParamMinioObjectKey, originalFilename, null,
FileMetadataInfo fileInfo = createFileMetadata(simulationParamMinioObjectKey, originalFilename,
null, null, null, parSimulationParamDirId, paramFile.getSize());
// 设置文件状态为审批中 暂不可见
fileInfo.setApprovalStatus(ApprovalFileDataStatusEnum.PENDING.getKey());
@@ -2627,7 +2630,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
minioService.uploadFile(trainModelFile, trainingMinioObjectKey, null,trainModelDirMetadataInfo.getBucketName());
// 创建目录元数据并保存到数据库
FileMetadataInfo fileInfo = createFileMetadata(trainingMinioObjectKey, originalFilename, null,
FileMetadataInfo fileInfo = createFileMetadata(trainingMinioObjectKey, originalFilename,
null, null, null, parTrainModelDirId, trainModelFile.getSize());
fileMetadataInfoService.save(fileInfo);
return SdmResponse.success(fileInfo.getId());
@@ -2672,7 +2675,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
minioService.uploadFile(file, fileMinioObjectKey, null,dirMetadataInfo.getBucketName());
// 创建目录元数据并保存到数据库
FileMetadataInfo fileInfo = createFileMetadata(fileMinioObjectKey, originalFilename, null,
FileMetadataInfo fileInfo = createFileMetadata(fileMinioObjectKey, originalFilename,
null, null, null, parDirId, file.getSize());
fileMetadataInfoService.save(fileInfo);
return SdmResponse.success(fileInfo.getId());

View File

@@ -158,7 +158,7 @@
<select id="selectBigFiles" resultType="com.sdm.data.model.entity.FileStorage">
SELECT
distinct
file_storage.fileName,file_storage.fileId,file_storage.userGroupId,file_storage.userId,file_storage.fileBizType,file_storage.fileSuffix,file_storage.updateTime
file_storage.fileName,file_storage.fileId,file_storage.userGroupId,file_storage.userId,file_storage.fileSuffix,file_storage.updateTime
FROM file_metadata_info
inner join file_storage on file_storage.fileId = file_metadata_info.id
<where>
@@ -183,10 +183,10 @@
<if test="queryBigFileReq.fileSuffix != null and queryBigFileReq.fileSuffix != ''">
AND file_storage.fileSuffix = #{queryBigFileReq.fileSuffix}
</if>
<if test="queryBigFileReq.fileBizType != null and queryBigFileReq.fileBizType.size()>0">
AND file_storage.fileBizType IN
<foreach collection="queryBigFileReq.fileBizType" item="type" open="(" separator="," close=")">
#{type}
<if test="queryBigFileReq.fileIds != null and queryBigFileReq.fileIds.size()>0">
AND file_storage.fileId IN
<foreach collection="queryBigFileReq.fileIds" item="fileId" open="(" separator="," close=")">
#{fileId}
</foreach>
</if>
<if test="queryBigFileReq.fileName != null and queryBigFileReq.fileName != ''">