新增:data异步合并状态是否完成查询

This commit is contained in:
2026-03-24 15:33:18 +08:00
parent e5340bdbef
commit ce0d1f9973
6 changed files with 152 additions and 66 deletions

View File

@@ -7,12 +7,10 @@ import com.sdm.common.entity.enums.DirTypeEnum;
import com.sdm.common.entity.req.data.*;
import com.sdm.common.entity.req.system.LaunchApproveReq;
import com.sdm.common.entity.resp.PageDataResp;
import com.sdm.common.entity.resp.data.BatchAddFileInfoResp;
import com.sdm.common.entity.resp.data.BatchCreateNormalDirResp;
import com.sdm.common.entity.resp.data.ChunkUploadMinioFileResp;
import com.sdm.common.entity.resp.data.FileMetadataInfoResp;
import com.sdm.common.entity.resp.data.*;
import com.sdm.common.feign.inter.data.IDataFeignClient;
import com.sdm.common.log.annotation.SysLog;
import com.sdm.common.utils.FilesUtil;
import com.sdm.data.model.entity.FileMetadataInfo;
import com.sdm.data.model.req.*;
import com.sdm.data.model.resp.KKFileViewURLFromMinioResp;
@@ -26,6 +24,7 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
@@ -34,15 +33,9 @@ import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Comparator;
import java.util.List;
@Slf4j
@RestController
@RequestMapping("/data")
@Tag(name = "数据文件管理", description = "文件和目录管理相关接口")
@@ -563,6 +556,12 @@ public class DataFileController implements IDataFeignClient {
return IDataFileService.batchAddFileInfo(req);
}
/**
 * Polled by the front end after all chunks of a file have been uploaded,
 * to check whether the server-side asynchronous merge has finished.
 * Delegates directly to {@code IDataFileService.getChunkUploadMergeResult}.
 *
 * @param req carries the scene identifier and the business ids to query
 *            (see {@code CommonStatusResultReq})
 * @return per-business-id merge completion flags wrapped in SdmResponse
 */
@PostMapping("/getChunkUploadMergeResult")
@Operation(summary = "文件分片上传成功后前端轮询异步合并的结果")
public SdmResponse<List<CommonStatusResultResp>> getChunkUploadMergeResult(@RequestBody CommonStatusResultReq req) {
return IDataFileService.getChunkUploadMergeResult(req);
}
@PostMapping("/chunkUploadCallback")
@Operation(summary = "文件分片上传成功后前端回调")
public SdmResponse chunkUploadCallback(@RequestBody KnowledgeCallBackReq req) {
@@ -631,60 +630,91 @@ public class DataFileController implements IDataFeignClient {
return IDataFileService.onlyOfficeCallback(callbackData);
}
// /**
// * flowable 节点文件回传本地磁盘
// * @param fileId 绝对路径拼接成成临时碎片目录filename 文件名称absoluteFilePath文件的绝对路径不带文件名称
// * @return
// */
// @PostMapping ("/flowableUpFileToLocal")
// public ResponseEntity<String> uploadChunk(
// @RequestParam String fileId,
// @RequestParam int chunkIndex,
// @RequestParam String absoluteFilePath,
// HttpServletRequest request) throws IOException {
// // chunk 路径:/data/upload/chunks/{fileId}/{chunkIndex}.part
// Path normalize = Paths.get(absoluteFilePath).toAbsolutePath().normalize();
// Path chunkDir = normalize.resolve(fileId).normalize();
// Files.createDirectories(chunkDir);
// Path chunkPath = chunkDir.resolve(chunkIndex + ".temp");
// try (InputStream in = request.getInputStream();
// OutputStream out = Files.newOutputStream(chunkPath,
// StandardOpenOption.CREATE,
// StandardOpenOption.WRITE)) {
// in.transferTo(out);
// }
// return ResponseEntity.ok("OK");
// }
//
// @PostMapping ("/flowableUpFileToLocalMerge")
// public ResponseEntity<String> mergeChunks(
// @RequestParam String fileId,
// @RequestParam int totalChunks,
// @RequestParam String absoluteFilePath,
// @RequestParam String filename) throws IOException {
// Path targetDir = Paths.get(absoluteFilePath).toAbsolutePath().normalize();
// Path chunkDir = targetDir.resolve(fileId);
// Files.createDirectories(targetDir);
// Path targetFile = targetDir.resolve(filename);
// try (OutputStream out = Files.newOutputStream(
// targetFile,
// StandardOpenOption.CREATE,
// StandardOpenOption.WRITE)) {
// for (int i = 0; i < totalChunks; i++) {
// Path chunk = chunkDir.resolve(i + ".temp");
// try (InputStream in = Files.newInputStream(chunk)) {
// in.transferTo(out);
// }
// }
// }
// // 合并完删除分片
// Files.walk(chunkDir)
// .sorted(Comparator.reverseOrder())
// .forEach(path -> {
// try { Files.delete(path); } catch (Exception ignored) {}
// });
//
// return ResponseEntity.ok("SUCCESS:" + targetFile.toString());
// }
/**
 * 节点文件分片上传(自动合并版)
 * 接收分片 -> 保存分片 -> 判断是否为最后一个分片 -> 是则自动合并并清理分片
 * @param fileId 文件唯一标识
 * @param chunkIndex 当前分片序号从0开始
 * @param totalChunks 总分片数
 * @param absoluteFilePath 文件存储绝对路径(不带文件名)
 * @param filename 最终文件名
 * @param request 请求对象
 * @return 上传结果(成功/合并成功)
 */
@PostMapping ("/flowableUpFileToLocal")
public ResponseEntity<String> uploadChunk(
@RequestParam String fileId,
@RequestParam int chunkIndex,
@RequestParam String absoluteFilePath,
HttpServletRequest request) throws IOException {
// chunk 路径:/data/upload/chunks/{fileId}/{chunkIndex}.part
Path normalize = Paths.get(absoluteFilePath).toAbsolutePath().normalize();
Path chunkDir = normalize.resolve(fileId).normalize();
Files.createDirectories(chunkDir);
Path chunkPath = chunkDir.resolve(chunkIndex + ".temp");
try (InputStream in = request.getInputStream();
OutputStream out = Files.newOutputStream(chunkPath,
StandardOpenOption.CREATE,
StandardOpenOption.WRITE)) {
in.transferTo(out);
@PostMapping("/nodeCallbackUploadLocalChunkAutoMerge")
public SdmResponse<String> nodeCallbackUploadLocalChunkAutoMerge(
@RequestParam String fileId,@RequestParam int chunkIndex,
@RequestParam int totalChunks,@RequestParam String absoluteFilePath,
@RequestParam String filename, HttpServletRequest request) {
String result = "";
try {
result = FilesUtil.handleChunkUploadLocalAndAutoMerge(
absoluteFilePath, fileId, chunkIndex,totalChunks,filename,
request.getInputStream() );
} catch (Exception e) {
log.error("nodeCallbackUploadLocalChunkAutoMerge error:{}",e.getMessage());
result = "上传异常";
return SdmResponse.failed(result);
}
return ResponseEntity.ok("OK");
}
@PostMapping ("/flowableUpFileToLocalMerge")
public ResponseEntity<String> mergeChunks(
@RequestParam String fileId,
@RequestParam int totalChunks,
@RequestParam String absoluteFilePath,
@RequestParam String filename) throws IOException {
Path targetDir = Paths.get(absoluteFilePath).toAbsolutePath().normalize();
Path chunkDir = targetDir.resolve(fileId);
Files.createDirectories(targetDir);
Path targetFile = targetDir.resolve(filename);
try (OutputStream out = Files.newOutputStream(
targetFile,
StandardOpenOption.CREATE,
StandardOpenOption.WRITE)) {
for (int i = 0; i < totalChunks; i++) {
Path chunk = chunkDir.resolve(i + ".temp");
try (InputStream in = Files.newInputStream(chunk)) {
in.transferTo(out);
}
}
}
// 合并完删除分片
Files.walk(chunkDir)
.sorted(Comparator.reverseOrder())
.forEach(path -> {
try { Files.delete(path); } catch (Exception ignored) {}
});
return ResponseEntity.ok("SUCCESS:" + targetFile.toString());
return SdmResponse.success(result);
}
/**

View File

@@ -5,9 +5,7 @@ import com.sdm.common.entity.req.data.*;
import com.sdm.common.entity.req.data.CopyFileToTaskReq;
import com.sdm.common.entity.req.system.LaunchApproveReq;
import com.sdm.common.entity.resp.PageDataResp;
import com.sdm.common.entity.resp.data.BatchAddFileInfoResp;
import com.sdm.common.entity.resp.data.ChunkUploadMinioFileResp;
import com.sdm.common.entity.resp.data.FileMetadataInfoResp;
import com.sdm.common.entity.resp.data.*;
import com.sdm.data.model.entity.FileMetadataInfo;
import com.sdm.data.model.req.*;
import com.sdm.data.model.resp.KKFileViewURLFromMinioResp;
@@ -21,7 +19,6 @@ import java.io.File;
import java.io.InputStream;
import java.util.List;
import com.sdm.common.entity.req.data.BatchCreateNormalDirReq;
import com.sdm.common.entity.resp.data.BatchCreateNormalDirResp;
/**
* 数据文件服务接口
@@ -465,4 +462,7 @@ public interface IDataFileService {
default SdmResponse<List<FileMetadataInfoResp>> queryFileListByIdList(QueryFileReq queryFileReq){return null;}
default SdmResponse<FileMetadataInfoResp> queryFileIdByNodeId(QueryFileIdReq queryFileIdReq){return null;};
SdmResponse<List<CommonStatusResultResp>> getChunkUploadMergeResult(CommonStatusResultReq req);
}

View File

@@ -4479,6 +4479,27 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
return SdmResponse.success(fileMetadataInfoResp);
}
@Override
public SdmResponse<List<CommonStatusResultResp>> getChunkUploadMergeResult(CommonStatusResultReq req) {
    // Validate the polling request: both the scene and the business-id list are required.
    boolean sceneMissing = StringUtils.isEmpty(req.getScene());
    boolean idsMissing = CollectionUtils.isEmpty(req.getSuccBusinessIds());
    if (sceneMissing || idsMissing) {
        throw new IllegalArgumentException("请求参数非法scene或succBusinessIds不能为空");
    }
    final String scene = req.getScene();
    // Build one status entry per queried business id.
    List<CommonStatusResultResp> statusList = req.getSuccBusinessIds().stream()
            .map(businessId -> {
                CommonStatusResultResp status = new CommonStatusResultResp();
                status.setSuccBusinessId(businessId);
                // The status key (scene + businessId) is present while the async
                // merge is still running -> "N"; absent once finished -> "Y".
                status.setFinished(commonStatusUtil.exists(scene + businessId) ? "N" : "Y");
                return status;
            })
            .toList();
    return SdmResponse.success(statusList);
}
// ========================== 批量创建目录实现 ==========================
/**

View File

@@ -15,6 +15,7 @@ import com.sdm.common.entity.pojo.system.SysCompany;
import com.sdm.common.entity.pojo.system.SysUserInfo;
import com.sdm.common.entity.req.data.*;
import com.sdm.common.entity.resp.data.BatchAddFileInfoResp;
import com.sdm.common.entity.resp.data.CommonStatusResultResp;
import com.sdm.common.entity.resp.data.FileMetadataInfoResp;
import com.sdm.common.service.CommonService;
import com.sdm.common.utils.*;
@@ -1664,5 +1665,9 @@ public class SystemFileIDataFileServiceImpl implements IDataFileService {
return null;
}
/**
 * Chunk-upload merge-status query for the system-file storage implementation.
 * NOTE(review): unimplemented placeholder — always returns {@code null}, matching
 * the other default stubs in this class; callers presumably use the Minio
 * implementation for this operation — confirm before relying on it here.
 *
 * @param req the merge-status query request (ignored)
 * @return always {@code null}
 */
@Override
public SdmResponse<List<CommonStatusResultResp>> getChunkUploadMergeResult(CommonStatusResultReq req){
return null;
}
}