From a7a043ac09216124fb6875b2b68f4789f196e65d Mon Sep 17 00:00:00 2001 From: gulongcheng <474084054@qq.com> Date: Mon, 5 Jan 2026 16:23:33 +0800 Subject: [PATCH] =?UTF-8?q?=E6=95=B0=E6=8D=AE=E6=80=BB=E8=A7=88=20?= =?UTF-8?q?=E6=95=B0=E6=8D=AE=E6=A3=80=E7=B4=A2=E6=8A=A5=E9=94=99=20?= =?UTF-8?q?=E4=BC=98=E5=8C=96=20=E6=95=B0=E6=8D=AE=E6=80=BB=E8=A7=88?= =?UTF-8?q?=E3=80=81=E6=95=B0=E6=8D=AE=E6=9F=A5=E8=AF=A2=E3=80=81=E6=95=B0?= =?UTF-8?q?=E6=8D=AE=E5=AD=98=E5=82=A8=20=E5=85=A8=E9=87=8F=E5=B1=95?= =?UTF-8?q?=E7=A4=BA=E6=96=87=E4=BB=B6=E6=89=80=E5=B1=9E=E9=A1=B9=E7=9B=AE?= =?UTF-8?q?=E8=8A=82=E7=82=B9=E4=BF=A1=E6=81=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../com/sdm/common/entity/resp/BaseResp.java | 54 + .../resp/data/FileMetadataInfoResp.java | 3 +- .../data/SimulationTaskResultCurveResp.java | 25 +- .../com/sdm/data/model/req/FileSearchReq.java | 6 + .../sdm/data/model/req/QueryBigFileReq.java | 6 + .../sdm/data/model/resp/ListBigFileResp.java | 37 +- .../service/impl/DataAnalysisServiceImpl.java | 76 +- .../service/impl/DataStorageAnalysisImpl.java | 83 +- .../impl/DimensionTemplateServiceImpl.java | 40 +- .../impl/FileMetadataHierarchyHelper.java | 357 ++ .../impl/MinioFileIDataFileServiceImpl.java | 57 +- .../resources/mapper/FileStorageMapper.xml | 9 +- flowable/repomix-output.xml | 5293 +++++++++++++++++ 13 files changed, 5812 insertions(+), 234 deletions(-) create mode 100644 common/src/main/java/com/sdm/common/entity/resp/BaseResp.java create mode 100644 data/src/main/java/com/sdm/data/service/impl/FileMetadataHierarchyHelper.java create mode 100644 flowable/repomix-output.xml diff --git a/common/src/main/java/com/sdm/common/entity/resp/BaseResp.java b/common/src/main/java/com/sdm/common/entity/resp/BaseResp.java new file mode 100644 index 00000000..73227c1c --- /dev/null +++ b/common/src/main/java/com/sdm/common/entity/resp/BaseResp.java @@ -0,0 +1,54 @@ +package com.sdm.common.entity.resp; + 
+import io.swagger.v3.oas.annotations.media.Schema; +import lombok.Data; + +@Data +public class BaseResp { + + //格式化后的文件大小 + @Schema(description = "格式化后的文件大小") + private String formatFileSize; + + @Schema(description = "所属项目") + String ownProjectName; + + @Schema(description = "所属项目id") + private String ownProjectId; + + @Schema(description = "所属阶段") + String ownPhaseName; + + @Schema(description = "所属阶段id") + private String ownPhaseId; + + @Schema(description = "所属学科") + String ownDisciplineName; + + @Schema(description = "所属学科id") + private String ownDisciplineId; + + @Schema(description = "所属机器") + private String ownMachineName; + + @Schema(description = "所属机器id") + private String ownMachineId; + + @Schema(description = "所属工作空间") + private String ownWorkspaceName; + + @Schema(description = "所属工作空间id") + private String ownWorkspaceId; + + @Schema(description = "所属工况任务") + String owntaskName; + + @Schema(description = "所属工况任务id") + private String owntaskId; + + @Schema(description = "所属算列") + String ownRunName; + + @Schema(description = "所属算列id") + private String ownRunId; +} diff --git a/common/src/main/java/com/sdm/common/entity/resp/data/FileMetadataInfoResp.java b/common/src/main/java/com/sdm/common/entity/resp/data/FileMetadataInfoResp.java index e1e441ba..feb961d8 100644 --- a/common/src/main/java/com/sdm/common/entity/resp/data/FileMetadataInfoResp.java +++ b/common/src/main/java/com/sdm/common/entity/resp/data/FileMetadataInfoResp.java @@ -3,6 +3,7 @@ package com.sdm.common.entity.resp.data; import com.baomidou.mybatisplus.annotation.TableField; import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.sdm.common.entity.resp.BaseResp; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; @@ -16,7 +17,7 @@ import java.util.List; @Data @Schema(name = "FileMetadataDTO", description = "文件元数据传输对象") @JsonIgnoreProperties(ignoreUnknown = true) -public class FileMetadataInfoResp 
implements Serializable { +public class FileMetadataInfoResp extends BaseResp implements Serializable { private static final long serialVersionUID = 1L; diff --git a/common/src/main/java/com/sdm/common/entity/resp/data/SimulationTaskResultCurveResp.java b/common/src/main/java/com/sdm/common/entity/resp/data/SimulationTaskResultCurveResp.java index 101e1118..ce9c6963 100644 --- a/common/src/main/java/com/sdm/common/entity/resp/data/SimulationTaskResultCurveResp.java +++ b/common/src/main/java/com/sdm/common/entity/resp/data/SimulationTaskResultCurveResp.java @@ -1,6 +1,7 @@ package com.sdm.common.entity.resp.data; import com.fasterxml.jackson.annotation.JsonFormat; +import com.sdm.common.entity.resp.BaseResp; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; @@ -11,7 +12,7 @@ import java.time.LocalDateTime; */ @Data @Schema(description = "任务结果曲线响应对象") -public class SimulationTaskResultCurveResp { +public class SimulationTaskResultCurveResp extends BaseResp { @Schema(description = "文件id") private Long id; @@ -34,26 +35,4 @@ public class SimulationTaskResultCurveResp { @Schema(description = "创建时间") @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss") private LocalDateTime createTime; - - //格式化后的文件大小 - @Schema(description = "格式化后的文件大小") - private String formatFileSize; - - @Schema(description = "所属项目") - private String projectName; - - @Schema(description = "所属项目id") - private String projectId; - - @Schema(description = "所属阶段") - private String phaseName; - - @Schema(description = "所属阶段id") - private String phaseId; - - @Schema(description = "所属学科") - private String disciplineName; - - @Schema(description = "所属学科id") - private String disciplineId; } diff --git a/data/src/main/java/com/sdm/data/model/req/FileSearchReq.java b/data/src/main/java/com/sdm/data/model/req/FileSearchReq.java index 2fd56cac..ada0c867 100644 --- a/data/src/main/java/com/sdm/data/model/req/FileSearchReq.java +++ b/data/src/main/java/com/sdm/data/model/req/FileSearchReq.java @@ -5,6 
+5,7 @@ import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import java.time.LocalDateTime; +import java.util.ArrayList; import java.util.List; @Data @@ -34,6 +35,11 @@ public class FileSearchReq extends BaseReq { @Schema(description = "父目录ID") private Long parentDirId; + /** + * 父目录集合下搜索 + */ + List parentDirIds = new ArrayList<>(); + /** * 父节点id */ diff --git a/data/src/main/java/com/sdm/data/model/req/QueryBigFileReq.java b/data/src/main/java/com/sdm/data/model/req/QueryBigFileReq.java index cf01af02..634e5220 100644 --- a/data/src/main/java/com/sdm/data/model/req/QueryBigFileReq.java +++ b/data/src/main/java/com/sdm/data/model/req/QueryBigFileReq.java @@ -6,6 +6,7 @@ import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import java.time.LocalDateTime; +import java.util.ArrayList; import java.util.List; @Data @@ -15,6 +16,11 @@ public class QueryBigFileReq extends BaseReq { */ private Long dirId; + /** + * 目录ID列表 + */ + private List dirIds = new ArrayList<>(); + /** * 目录类型 DirTypeEnum */ diff --git a/data/src/main/java/com/sdm/data/model/resp/ListBigFileResp.java b/data/src/main/java/com/sdm/data/model/resp/ListBigFileResp.java index 9a763026..74f1d42f 100644 --- a/data/src/main/java/com/sdm/data/model/resp/ListBigFileResp.java +++ b/data/src/main/java/com/sdm/data/model/resp/ListBigFileResp.java @@ -3,23 +3,20 @@ package com.sdm.data.model.resp; import com.baomidou.mybatisplus.annotation.FieldStrategy; import com.baomidou.mybatisplus.annotation.TableField; import com.fasterxml.jackson.annotation.JsonFormat; +import com.sdm.common.entity.resp.BaseResp; import io.swagger.v3.oas.annotations.media.Schema; import lombok.Data; import java.time.LocalDateTime; @Data -public class ListBigFileResp { +public class ListBigFileResp extends BaseResp { @Schema(description = "文件id") private Long id; @Schema(description = "文件原始名称") private String originalName; - //格式化后的文件大小 - @Schema(description = "格式化后的文件大小") - private String formatFileSize; 
- @Schema(description= "approvalStatus") private String approvalStatus; @@ -33,36 +30,6 @@ public class ListBigFileResp { @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss") private LocalDateTime createTime; - @Schema(description = "所属项目") - String ownProjectName; - - @Schema(description = "所属项目id") - private String ownProjectId; - - @Schema(description = "所属阶段") - String ownPhaseName; - - @Schema(description = "所属阶段id") - private String ownPhaseId; - - @Schema(description = "所属学科") - String ownDisciplineName; - - @Schema(description = "所属学科id") - private String ownDisciplineId; - - @Schema(description = "所属工况任务") - String owntaskName; - - @Schema(description = "所属工况任务id") - private String owntaskId; - - @Schema(description = "所属算列") - String ownRunName; - - @Schema(description = "所属算列id") - private String ownRunId; - @Schema(description = "仿真执行人") String executorName; } diff --git a/data/src/main/java/com/sdm/data/service/impl/DataAnalysisServiceImpl.java b/data/src/main/java/com/sdm/data/service/impl/DataAnalysisServiceImpl.java index 7ee64057..9af60855 100644 --- a/data/src/main/java/com/sdm/data/service/impl/DataAnalysisServiceImpl.java +++ b/data/src/main/java/com/sdm/data/service/impl/DataAnalysisServiceImpl.java @@ -4,9 +4,7 @@ import com.alibaba.fastjson2.JSONObject; import com.github.pagehelper.PageInfo; import com.sdm.common.common.SdmResponse; import com.sdm.common.common.ThreadLocalContext; -import com.sdm.common.entity.enums.NodeTypeEnum; import com.sdm.common.entity.resp.PageDataResp; -import com.sdm.common.utils.FileSizeUtils; import com.sdm.common.utils.PageUtils; import com.sdm.data.model.entity.FileMetadataInfo; import com.sdm.data.model.entity.FileStorage; @@ -27,7 +25,6 @@ import org.springframework.stereotype.Service; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.InputStreamReader; -import java.math.BigDecimal; import java.nio.charset.StandardCharsets; import java.util.*; import java.util.stream.Collectors; @@ 
-44,6 +41,9 @@ public class DataAnalysisServiceImpl implements IDataAnalysisService { @Autowired private IMinioService MinIOService; + @Autowired + private FileMetadataHierarchyHelper hierarchyHelper; + @Override public SdmResponse>> getSimulationTaskFile(GetSimulationTaskFileReq getSimulationTaskFileReq) { // 1. 构造查询条件 @@ -72,71 +72,15 @@ public class DataAnalysisServiceImpl implements IDataAnalysisService { return PageUtils.getJsonObjectSdmResponse(new ArrayList<>(), new PageInfo<>()); } - // 3. 获取当前页文件的详细元数据 + // 3. 一站式处理:获取文件元数据、构建父目录缓存、转换为响应对象并设置层级信息 List fileIdList = fileStorages.stream().map(FileStorage::getFileId).toList(); - List currentFiles = fileMetadataInfoService.lambdaQuery() - .eq(FileMetadataInfo::getTenantId, ThreadLocalContext.getTenantId()) - .in(FileMetadataInfo::getId, fileIdList) - .list(); + List finalResultList = hierarchyHelper.processFileHierarchy( + fileIdList, + SimulationTaskResultCurveResp.class, + FileMetadataHierarchyHelper::setFileHierarchy + ); - // 批量分层获取所有相关的父目录 - // key 是 ID,value 是对应的元数据实体。用于在内存中快速查找。 - Map parentCacheMap = new HashMap<>(); - // 当前需要去数据库查的父级 ID 集合 - Set nextFetchIds = currentFiles.stream() - .map(FileMetadataInfo::getParentId) - .filter(pid -> pid != null && pid != 0) - .collect(Collectors.toSet()); - - int safetyDepth = 0; // 防死循环计数器 - // 只要还有没查过的父 ID,且深度在合理范围内(10层),就继续批量查 - while (CollectionUtils.isNotEmpty(nextFetchIds) && safetyDepth < 10) { - // 一次性查出当前这一层所有的父节点信息 - List parents = fileMetadataInfoService.listByIds(nextFetchIds); - if (CollectionUtils.isEmpty(parents)) break; - - nextFetchIds = new HashSet<>(); // 重置,准备收集下一层 ID - for (FileMetadataInfo p : parents) { - parentCacheMap.put(p.getId(), p); - // 如果这个父节点还有上级,且我们之前没查过这个上级,就加进下一次查询列表 - if (p.getParentId() != null && p.getParentId() != 0 && !parentCacheMap.containsKey(p.getParentId())) { - nextFetchIds.add(p.getParentId()); - } - } - safetyDepth++; - } - - // 内存组装数据:将 FileMetadata 转换为 Response,并回溯层级信息 - List finalResultList = 
currentFiles.stream().map(file -> { - SimulationTaskResultCurveResp resp = new SimulationTaskResultCurveResp(); - BeanUtils.copyProperties(file, resp); - resp.setFormatFileSize(FileSizeUtils.formatFileSize(BigDecimal.valueOf(file.getFileSize()))); - - // 从 parentCacheMap 中回溯,设置项目、阶段、专业信息 - Long pid = file.getParentId(); - int limit = 0; - // 这里的循环完全在内存中进行,速度极快且不产生日志 - while (pid != null && parentCacheMap.containsKey(pid) && limit < 15) { - FileMetadataInfo folder = parentCacheMap.get(pid); - String ownType = folder.getRelatedResourceUuidOwnType(); - - if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) { - resp.setProjectName(folder.getOriginalName()); - resp.setProjectId(folder.getRelatedResourceUuid()); - } else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) { - resp.setPhaseName(folder.getOriginalName()); - resp.setPhaseId(folder.getRelatedResourceUuid()); - } else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) { - resp.setDisciplineName(folder.getOriginalName()); - resp.setDisciplineId(folder.getRelatedResourceUuid()); - } - pid = folder.getParentId(); - limit++; - } - return resp; - }).toList(); - - // 6. 构造分页信息并返回 + // 4. 
构造分页信息并返回 PageInfo pageInfo = new PageInfo<>(); pageInfo.setTotal(pageDataResp.getTotal()); pageInfo.setPageNum(pageDataResp.getCurrentPage()); diff --git a/data/src/main/java/com/sdm/data/service/impl/DataStorageAnalysisImpl.java b/data/src/main/java/com/sdm/data/service/impl/DataStorageAnalysisImpl.java index 85cb79e0..07944b95 100644 --- a/data/src/main/java/com/sdm/data/service/impl/DataStorageAnalysisImpl.java +++ b/data/src/main/java/com/sdm/data/service/impl/DataStorageAnalysisImpl.java @@ -7,7 +7,6 @@ import com.github.pagehelper.PageInfo; import com.sdm.common.common.SdmResponse; import com.sdm.common.common.ThreadLocalContext; import com.sdm.common.entity.enums.ApproveFileDataTypeEnum; -import com.sdm.common.entity.enums.NodeTypeEnum; import com.sdm.common.entity.req.system.UserListReq; import com.sdm.common.entity.resp.PageDataResp; import com.sdm.common.entity.resp.system.CIDUserResp; @@ -65,6 +64,9 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis { @Autowired IFileStorageQuotaService fileStorageQuotaService; + @Autowired + FileMetadataHierarchyHelper hierarchyHelper; + public SdmResponse> getNodeSizeByNodeType(String queryNodeType, String queryNodeName, Integer intervalMonths, String targetYm) { // uuid是项目的uuid List nodeList = fileMetadataInfoService.lambdaQuery() @@ -248,77 +250,15 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis { return PageUtils.getJsonObjectSdmResponse(new ArrayList<>(), new PageInfo<>()); } - // 3. 获取当前页文件的详细元数据 + // 2. 
一站式处理:获取文件元数据、构建父目录缓存、转换为响应对象并设置层级信息 List fileIdList = fileStorages.stream().map(FileStorage::getFileId).toList(); - List currentFiles = fileMetadataInfoService.lambdaQuery() - .eq(FileMetadataInfo::getTenantId, ThreadLocalContext.getTenantId()) - .in(FileMetadataInfo::getId, fileIdList) - .list(); + List finalResultList = hierarchyHelper.processFileHierarchy( + fileIdList, + ListBigFileResp.class, + FileMetadataHierarchyHelper::setFileHierarchy + ); - // 批量分层获取所有相关的父目录 - // key 是 ID,value 是对应的元数据实体。用于在内存中快速查找。 - Map parentCacheMap = new HashMap<>(); - // 当前需要去数据库查的父级 ID 集合 - Set nextFetchIds = currentFiles.stream() - .map(FileMetadataInfo::getParentId) - .filter(pid -> pid != null && pid != 0) - .collect(Collectors.toSet()); - - int safetyDepth = 0; // 防死循环计数器 - // 只要还有没查过的父 ID,且深度在合理范围内(10层),就继续批量查 - while (org.apache.commons.collections4.CollectionUtils.isNotEmpty(nextFetchIds) && safetyDepth < 10) { - // 一次性查出当前这一层所有的父节点信息 - List parents = fileMetadataInfoService.listByIds(nextFetchIds); - if (org.apache.commons.collections4.CollectionUtils.isEmpty(parents)) break; - - nextFetchIds = new HashSet<>(); // 重置,准备收集下一层 ID - for (FileMetadataInfo p : parents) { - parentCacheMap.put(p.getId(), p); - // 如果这个父节点还有上级,且我们之前没查过这个上级,就加进下一次查询列表 - if (p.getParentId() != null && p.getParentId() != 0 && !parentCacheMap.containsKey(p.getParentId())) { - nextFetchIds.add(p.getParentId()); - } - } - safetyDepth++; - } - - // 内存组装数据:将 FileMetadata 转换为 Response,并回溯层级信息 - List finalResultList = currentFiles.stream().map(file -> { - ListBigFileResp resp = new ListBigFileResp(); - BeanUtils.copyProperties(file, resp); - resp.setFormatFileSize(FileSizeUtils.formatFileSize(BigDecimal.valueOf(file.getFileSize()))); - - // 从 parentCacheMap 中回溯,设置项目、阶段、专业信息 - Long pid = file.getParentId(); - int limit = 0; - // 这里的循环完全在内存中进行,速度极快且不产生日志 - while (pid != null && parentCacheMap.containsKey(pid) && limit < 15) { - FileMetadataInfo folder = parentCacheMap.get(pid); - String ownType = 
folder.getRelatedResourceUuidOwnType(); - - if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) { - resp.setOwnProjectName(folder.getOriginalName()); - resp.setOwnProjectId(folder.getRelatedResourceUuid()); - } else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) { - resp.setOwnPhaseName(folder.getOriginalName()); - resp.setOwnPhaseId(folder.getRelatedResourceUuid()); - } else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) { - resp.setOwnDisciplineName(folder.getOriginalName()); - resp.setOwnDisciplineId(folder.getRelatedResourceUuid()); - }else if (NodeTypeEnum.TASK.getValue().equals(ownType)) { - resp.setOwntaskName(folder.getOriginalName()); - resp.setOwntaskId(folder.getRelatedResourceUuid()); - }if (NodeTypeEnum.RUN.getValue().equals(ownType)) { - resp.setOwnRunName(folder.getOriginalName()); - resp.setOwnRunId(folder.getRelatedResourceUuid()); - } - pid = folder.getParentId(); - limit++; - } - return resp; - }).toList(); - - // 6. 构造分页信息并返回 + // 3. 构造分页信息并返回 PageInfo pageInfo = new PageInfo<>(); pageInfo.setTotal(pageDataResp.getTotal()); pageInfo.setPageNum(pageDataResp.getCurrentPage()); @@ -339,6 +279,9 @@ public class DataStorageAnalysisImpl implements DataStorageAnalysis { if (queryBigFileReq.getFileSize() != null && queryBigFileReq.getFileSizeUnit() != null) { fileSizeInBytes = convertToBytes(queryBigFileReq.getFileSize(), queryBigFileReq.getFileSizeUnit()); } + if(ObjectUtils.isNotEmpty((queryBigFileReq.getDirId()))){ + queryBigFileReq.getDirIds().add(queryBigFileReq.getDirId()); + } Long tenantId = ThreadLocalContext.getTenantId(); PageHelper.startPage(queryBigFileReq.getCurrent(), queryBigFileReq.getSize()); List list = fileStorageService.selectBigFiles(queryBigFileReq, fileSizeInBytes, tenantId); diff --git a/data/src/main/java/com/sdm/data/service/impl/DimensionTemplateServiceImpl.java b/data/src/main/java/com/sdm/data/service/impl/DimensionTemplateServiceImpl.java index d2335b7f..91b5b41f 100644 --- 
a/data/src/main/java/com/sdm/data/service/impl/DimensionTemplateServiceImpl.java +++ b/data/src/main/java/com/sdm/data/service/impl/DimensionTemplateServiceImpl.java @@ -74,6 +74,9 @@ public class DimensionTemplateServiceImpl extends ServiceImpl group = entry.getValue(); if (!group.isEmpty()) { + FileMetadataHierarchyHelper.FileHierarchyResult filesWithParentCache = hierarchyHelper.getFilesWithParentCache(group.stream().map(FileMetadataInfo::getId).toList()); + Map parentCacheMap = filesWithParentCache.getParentCacheMap(); + + // 使用第一个作为基础对象 FileMetadataInfo baseInfo = group.get(0); // 创建包含children的DTO对象 FileMetadataChildrenDTO dto = FileMetadataConvert.INSTANCE.convertToFileMetadataChildrenDTO(baseInfo); + hierarchyHelper.backtrackAndSetHierarchy(baseInfo, parentCacheMap, dto, FileMetadataHierarchyHelper::setFileHierarchy); // 转换children列表,并为每个child设置totalName List children = new ArrayList<>(); for (FileMetadataInfo fileInfo : group) { FileMetadataChildrenDTO childDto = FileMetadataConvert.INSTANCE.convertToFileMetadataChildrenDTO(fileInfo); - + hierarchyHelper.backtrackAndSetHierarchy(fileInfo, parentCacheMap, childDto, FileMetadataHierarchyHelper::setFileHierarchy); // 设置totalName(从bucketName中提取) String objectKey = fileInfo.getObjectKey(); String baseDirPath = DirTypeEnum.PROJECT_NODE_DIR.getDirName() + "/"; @@ -319,22 +327,24 @@ public class DimensionTemplateServiceImpl extends ServiceImpl { - LocalDateTime maxCreateTime1 = dto1.getMergeSameNameChildren().stream() - .map(FileMetadataInfoResp::getCreateTime) - .filter(Objects::nonNull) - .max(LocalDateTime::compareTo) - .orElse(LocalDateTime.MIN); + if(CollectionUtils.isNotEmpty(result)) { + // 根据children中的最大创建时间对result进行倒序排序 + result.sort((dto1, dto2) -> { + LocalDateTime maxCreateTime1 = dto1.getMergeSameNameChildren().stream() + .map(FileMetadataInfoResp::getCreateTime) + .filter(Objects::nonNull) + .max(LocalDateTime::compareTo) + .orElse(LocalDateTime.MIN); - LocalDateTime maxCreateTime2 = 
dto2.getMergeSameNameChildren().stream() - .map(FileMetadataInfoResp::getCreateTime) - .filter(Objects::nonNull) - .max(LocalDateTime::compareTo) - .orElse(LocalDateTime.MIN); + LocalDateTime maxCreateTime2 = dto2.getMergeSameNameChildren().stream() + .map(FileMetadataInfoResp::getCreateTime) + .filter(Objects::nonNull) + .max(LocalDateTime::compareTo) + .orElse(LocalDateTime.MIN); - return maxCreateTime2.compareTo(maxCreateTime1); // 倒序排序 - }); + return maxCreateTime2.compareTo(maxCreateTime1); // 倒序排序 + }); + } return result; } diff --git a/data/src/main/java/com/sdm/data/service/impl/FileMetadataHierarchyHelper.java b/data/src/main/java/com/sdm/data/service/impl/FileMetadataHierarchyHelper.java new file mode 100644 index 00000000..d0a77beb --- /dev/null +++ b/data/src/main/java/com/sdm/data/service/impl/FileMetadataHierarchyHelper.java @@ -0,0 +1,357 @@ +package com.sdm.data.service.impl; + +import com.sdm.common.common.ThreadLocalContext; +import com.sdm.common.entity.enums.NodeTypeEnum; +import com.sdm.common.utils.FileSizeUtils; +import com.sdm.data.model.entity.FileMetadataInfo; +import com.sdm.data.service.IFileMetadataInfoService; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.CollectionUtils; +import org.springframework.beans.BeanUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.math.BigDecimal; +import java.util.*; +import java.util.function.BiConsumer; +import java.util.stream.Collectors; + +/** + * 文件元数据层级信息帮助类 + * 用于递归查询父目录信息并设置层级关系 + */ +@Slf4j +@Component +public class FileMetadataHierarchyHelper { + + @Autowired + private IFileMetadataInfoService fileMetadataInfoService; + + /** + * 默认递归深度限制 + */ + private static final int DEFAULT_MAX_DEPTH = 10; + + /** + * 回溯时的安全循环限制 + */ + private static final int BACKTRACK_LIMIT = 15; + + /** + * 一站式方法:根据文件ID列表获取文件元数据,构建层级缓存,转换为响应对象并设置层级信息 + * + * @param fileIdList 文件ID列表 + * @param respClass 响应类型 + 
* @param hierarchySetter 层级信息设置器 + * @param 响应类型 + * @return 响应对象列表 + */ + public List processFileHierarchy( + List fileIdList, + Class respClass, + HierarchySetter hierarchySetter) { + + if (CollectionUtils.isEmpty(fileIdList)) { + return new ArrayList<>(); + } + + // 1. 获取文件元数据 + List currentFiles = getFileMetadataByIds(fileIdList); + + if (CollectionUtils.isEmpty(currentFiles)) { + return new ArrayList<>(); + } + + // 2. 构建父目录缓存 + Map parentCacheMap = buildParentCacheMap(currentFiles); + + // 3. 转换为响应对象并设置层级信息 + return convertToRespWithHierarchy(currentFiles, parentCacheMap, respClass, hierarchySetter); + } + + /** + * 获取文件元数据并构建父目录缓存(用于需要在中间步骤做更多处理的场景) + * + * @param fileIdList 文件ID列表 + * @return 文件元数据和父目录缓存的结果对象 + */ + public FileHierarchyResult getFilesWithParentCache(List fileIdList) { + if (CollectionUtils.isEmpty(fileIdList)) { + return new FileHierarchyResult(new ArrayList<>(), new HashMap<>()); + } + + List currentFiles = getFileMetadataByIds(fileIdList); + Map parentCacheMap = buildParentCacheMap(currentFiles); + + return new FileHierarchyResult(currentFiles, parentCacheMap); + } + + /** + * 文件层级查询结果封装类 + */ + public static class FileHierarchyResult { + private final List files; + private final Map parentCacheMap; + + public FileHierarchyResult(List files, Map parentCacheMap) { + this.files = files; + this.parentCacheMap = parentCacheMap; + } + + public List getFiles() { + return files; + } + + public Map getParentCacheMap() { + return parentCacheMap; + } + } + + /** + * 根据文件ID列表获取文件元数据 + * + * @param fileIdList 文件ID列表 + * @return 文件元数据列表 + */ + public List getFileMetadataByIds(List fileIdList) { + if (CollectionUtils.isEmpty(fileIdList)) { + return new ArrayList<>(); + } + return fileMetadataInfoService.lambdaQuery() + .eq(FileMetadataInfo::getTenantId, ThreadLocalContext.getTenantId()) + .in(FileMetadataInfo::getId, fileIdList) + .list(); + } + + /** + * 批量分层获取所有相关的父目录,构建父目录缓存Map + * + * @param currentFiles 当前文件列表 + * @return key 是 ID,value 是对应的元数据实体 
+ */ + public Map buildParentCacheMap(List currentFiles) { + return buildParentCacheMap(currentFiles, DEFAULT_MAX_DEPTH); + } + + /** + * 批量分层获取所有相关的父目录,构建父目录缓存Map + * + * @param currentFiles 当前文件列表 + * @param maxDepth 最大递归深度 + * @return key 是 ID,value 是对应的元数据实体 + */ + public Map buildParentCacheMap(List currentFiles, int maxDepth) { + Map parentCacheMap = new HashMap<>(); + + if (CollectionUtils.isEmpty(currentFiles)) { + return parentCacheMap; + } + + // 当前需要去数据库查的父级 ID 集合 + Set nextFetchIds = currentFiles.stream() + .map(FileMetadataInfo::getParentId) + .filter(pid -> pid != null && pid != 0) + .collect(Collectors.toSet()); + + int safetyDepth = 0; // 防死循环计数器 + + // 只要还有没查过的父 ID,且深度在合理范围内,就继续批量查 + while (CollectionUtils.isNotEmpty(nextFetchIds) && safetyDepth < maxDepth) { + // 一次性查出当前这一层所有的父节点信息 + List parents = fileMetadataInfoService.listByIds(nextFetchIds); + if (CollectionUtils.isEmpty(parents)) { + break; + } + + nextFetchIds = new HashSet<>(); // 重置,准备收集下一层 ID + for (FileMetadataInfo p : parents) { + parentCacheMap.put(p.getId(), p); + // 如果这个父节点还有上级,且我们之前没查过这个上级,就加进下一次查询列表 + if (p.getParentId() != null && p.getParentId() != 0 && !parentCacheMap.containsKey(p.getParentId())) { + nextFetchIds.add(p.getParentId()); + } + } + safetyDepth++; + } + + return parentCacheMap; + } + + /** + * 将文件列表转换为响应对象列表,并设置层级信息 + * + * @param currentFiles 当前文件列表 + * @param parentCacheMap 父目录缓存Map + * @param respClass 响应类型 + * @param hierarchySetter 层级信息设置器 + * @param 响应类型 + * @return 响应对象列表 + */ + public List convertToRespWithHierarchy( + List currentFiles, + Map parentCacheMap, + Class respClass, + HierarchySetter hierarchySetter) { + + if (CollectionUtils.isEmpty(currentFiles)) { + return new ArrayList<>(); + } + + return currentFiles.stream().map(file -> { + try { + T resp = respClass.getDeclaredConstructor().newInstance(); + BeanUtils.copyProperties(file, resp); + + // 设置格式化文件大小 + if (file.getFileSize() != null) { + setFormatFileSize(resp, file.getFileSize()); + } + + // 从 
parentCacheMap 中回溯,设置层级信息 + backtrackAndSetHierarchy(file, parentCacheMap, resp, hierarchySetter); + + return resp; + } catch (Exception e) { + log.error("转换文件元数据到响应对象失败", e); + throw new RuntimeException("转换失败", e); + } + }).toList(); + } + + /** + * 回溯父目录并设置层级信息 + */ + public void backtrackAndSetHierarchy( + FileMetadataInfo file, + Map parentCacheMap, + T resp, + HierarchySetter hierarchySetter) { + + Long pid = file.getParentId(); + int limit = 0; + + // 这里的循环完全在内存中进行,速度极快且不产生日志 + while (pid != null && parentCacheMap.containsKey(pid) && limit < BACKTRACK_LIMIT) { + FileMetadataInfo folder = parentCacheMap.get(pid); + String ownType = folder.getRelatedResourceUuidOwnType(); + + // 调用层级信息设置器 + hierarchySetter.setHierarchyInfo(resp, folder, ownType); + + pid = folder.getParentId(); + limit++; + } + } + + /** + * 通过反射设置格式化文件大小 + */ + private void setFormatFileSize(T resp, Long fileSize) { + try { + String formatFileSize = FileSizeUtils.formatFileSize(BigDecimal.valueOf(fileSize)); + resp.getClass().getMethod("setFormatFileSize", String.class).invoke(resp, formatFileSize); + } catch (NoSuchMethodException e) { + // 该响应类没有此方法,忽略 + } catch (Exception e) { + log.warn("设置格式化文件大小失败", e); + } + } + + /** + * 层级信息设置器函数式接口 + * + * @param 响应类型 + */ + @FunctionalInterface + public interface HierarchySetter { + /** + * 设置层级信息 + * + * @param resp 响应对象 + * @param folder 父文件夹元数据 + * @param ownType 节点类型 + */ + void setHierarchyInfo(T resp, FileMetadataInfo folder, String ownType); + } + + // ================ 预定义的层级信息设置器 ================ + + /** + * SimulationTaskResultCurveResp 的层级设置器 + * 设置 projectName/projectId, phaseName/phaseId, disciplineName/disciplineId + */ + public static void setSimulationTaskHierarchy(T resp, FileMetadataInfo folder, String ownType) { + try { + if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) { + resp.getClass().getMethod("setProjectName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setProjectId", 
String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) { + resp.getClass().getMethod("setPhaseName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setPhaseId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) { + resp.getClass().getMethod("setDisciplineName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setDisciplineId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } + } catch (Exception e) { + log.warn("设置SimulationTask层级信息失败", e); + } + } + + /** + * ListBigFileResp 的层级设置器 + * 设置 ownProjectName/ownProjectId, ownPhaseName/ownPhaseId, + * ownDisciplineName/ownDisciplineId, owntaskName/owntaskId, ownRunName/ownRunId + */ + public static void setListBigFileHierarchy(T resp, FileMetadataInfo folder, String ownType) { + try { + if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnProjectName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnProjectId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnPhaseName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnPhaseId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnDisciplineName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnDisciplineId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.TASK.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwntaskName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwntaskId", 
String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.RUN.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnRunName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnRunId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } + } catch (Exception e) { + log.warn("设置ListBigFile层级信息失败", e); + } + } + + public static void setFileHierarchy(T resp, FileMetadataInfo folder, String ownType) { + try { + if (NodeTypeEnum.PROJECT.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnProjectName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnProjectId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.PHASE.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnPhaseName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnPhaseId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.DISCIPLINE.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnDisciplineName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnDisciplineId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.MACHINE.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnMachineName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnMachineId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + }if (NodeTypeEnum.WORKSPACE.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnWorkspaceName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnWorkspaceId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.TASK.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwntaskName", 
String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwntaskId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } else if (NodeTypeEnum.RUN.getValue().equals(ownType)) { + resp.getClass().getMethod("setOwnRunName", String.class).invoke(resp, folder.getOriginalName()); + resp.getClass().getMethod("setOwnRunId", String.class).invoke(resp, folder.getRelatedResourceUuid()); + } + } catch (Exception e) { + log.warn("设置文件层级信息失败", e); + } + } + +} diff --git a/data/src/main/java/com/sdm/data/service/impl/MinioFileIDataFileServiceImpl.java b/data/src/main/java/com/sdm/data/service/impl/MinioFileIDataFileServiceImpl.java index 458fbcd7..173278bb 100644 --- a/data/src/main/java/com/sdm/data/service/impl/MinioFileIDataFileServiceImpl.java +++ b/data/src/main/java/com/sdm/data/service/impl/MinioFileIDataFileServiceImpl.java @@ -147,6 +147,9 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService { @Autowired private FileApproveExecutor fileApproveExecutor; + @Autowired + private FileMetadataHierarchyHelper hierarchyHelper; + @Autowired @Qualifier(value = "nonSensitiveTaskPool") private Executor nonSensitiveTaskPool; @@ -599,20 +602,30 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService { @Override public SdmResponse fileSearch(FileSearchReq minioFileSearchReq) { + if(ObjectUtils.isNotEmpty(minioFileSearchReq.getParentDirId())){ + minioFileSearchReq.getParentDirIds().add(minioFileSearchReq.getParentDirId()); + } + QueryBigFileReq queryBigFileReq = new QueryBigFileReq(); - Long dirId; + List dirIds = new ArrayList<>(); Integer dirType; if (ObjectUtils.isNotEmpty(minioFileSearchReq.getParentUuid())) { // 项目节点下搜索文件 FileMetadataInfo fileMetadataInfo = fileMetadataInfoService.lambdaQuery().eq(FileMetadataInfo::getRelatedResourceUuid, minioFileSearchReq.getParentUuid()).one(); - dirId = fileMetadataInfo.getId(); + if(ObjectUtils.isEmpty(fileMetadataInfo)){ + return SdmResponse.success(); 
+ } + dirIds.add(fileMetadataInfo.getId()); dirType = fileMetadataInfo.getDirType(); - } else if (ObjectUtils.isNotEmpty(minioFileSearchReq.getParentDirId())) { + } else if (ObjectUtils.isNotEmpty(minioFileSearchReq.getParentDirIds())) { // 知识库的文件查询 - FileMetadataInfo fileMetadataInfo = fileMetadataInfoService.getById(minioFileSearchReq.getParentDirId()); - dirId = fileMetadataInfo.getId(); - dirType = fileMetadataInfo.getDirType(); + List fileMetadataInfos = fileMetadataInfoService.listByIds(minioFileSearchReq.getParentDirIds()); + if(ObjectUtils.isEmpty(fileMetadataInfos)){ + return SdmResponse.success(); + } + dirIds.addAll(fileMetadataInfos.stream().map(FileMetadataInfo::getId).toList()); + dirType = fileMetadataInfos.get(0).getDirType(); } else if (ObjectUtils.isNotEmpty(minioFileSearchReq.getDirType())) { dirType = minioFileSearchReq.getDirType(); DirTypeEnum dirTypeByValue = DirTypeEnum.getDirTypeByValue(dirType); @@ -629,7 +642,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService { } // 获取根目录的 id - dirId = fileMetadataInfoByObjectKey.get().getId(); + dirIds.add(fileMetadataInfoByObjectKey.get().getId()); }else { return SdmResponse.failed("请选择目录类型:1 知识库文件夹,2 项目节点文件夹,3 头像库文件夹,4 仿真参数库文件夹,5 训练模型文件夹"); } @@ -638,7 +651,7 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService { queryBigFileReq.setIsLatest(true); queryBigFileReq.setCurrent(minioFileSearchReq.getCurrent()); queryBigFileReq.setSize(minioFileSearchReq.getSize()); - queryBigFileReq.setDirId(dirId); + queryBigFileReq.setDirIds(dirIds); if (Objects.equals(DirTypeEnum.KNOWLEDGE_BASE_DIR.getValue(), dirType)) { // 知识库文件:排除新增在审批的文件 queryBigFileReq.setApproveTypeList(fileDatdList); @@ -654,30 +667,30 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService { SdmResponse>> searchResult = dataStorageAnalysis.listBigFile(queryBigFileReq); - List fileIdList 
=searchResult.getData().getData().stream().map(FileStorage::getFileId).collect(Collectors.toList()); + List fileIdList = searchResult.getData().getData().stream().map(FileStorage::getFileId).collect(Collectors.toList()); if(CollectionUtils.isEmpty(fileIdList)){ return SdmResponse.success(); } - List files = fileMetadataInfoService - .lambdaQuery() - .in(FileMetadataInfo::getId, fileIdList) - .list(); + + // 使用 hierarchyHelper 获取文件元数据和父目录缓存 + FileMetadataHierarchyHelper.FileHierarchyResult hierarchyResult = hierarchyHelper.getFilesWithParentCache(fileIdList); + List files = hierarchyResult.getFiles(); + Map parentCacheMap = hierarchyResult.getParentCacheMap(); + setCreatorNames(files); setCidInfos(files); setProjectName(files); setAnalysisDirectionName(files); setSimulationPoolAndTaskInfo(files); - List dtoList = files.stream().map(entity -> { - FileMetadataInfoResp dto = new FileMetadataInfoResp(); - BeanUtils.copyProperties(entity, dto); - - //计算当前用户对该文件的综合权限位 - // 对于列表查询,如果层级很深,频繁递归会有性能问题。 - dto.setPermissionValue(fileUserPermissionService.getMergedPermission(entity.getId(), ThreadLocalContext.getUserId())); - return dto; - }).collect(Collectors.toList()); + List dtoList = hierarchyHelper.convertToRespWithHierarchy(files, parentCacheMap, FileMetadataInfoResp.class, FileMetadataHierarchyHelper::setFileHierarchy) + .stream().map(dto -> { + //计算当前用户对该文件的综合权限位 + // 对于列表查询,如果层级很深,频繁递归会有性能问题。 + dto.setPermissionValue(fileUserPermissionService.getMergedPermission(dto.getId(), ThreadLocalContext.getUserId())); + return dto; + }).collect(Collectors.toList()); PageDataResp> pageDataResp = searchResult.getData(); PageInfo page = new PageInfo(); page.setPageNum(pageDataResp.getCurrentPage()); diff --git a/data/src/main/resources/mapper/FileStorageMapper.xml b/data/src/main/resources/mapper/FileStorageMapper.xml index 4d01858c..47acf6bb 100644 --- a/data/src/main/resources/mapper/FileStorageMapper.xml +++ b/data/src/main/resources/mapper/FileStorageMapper.xml @@ -171,9 
+171,14 @@ #{approveType} - - AND file_storage.dirId = #{queryBigFileReq.dirId} + + + AND file_storage.dirId IN + + #{dirId} + + AND file_storage.fileSuffix = #{queryBigFileReq.fileSuffix} diff --git a/flowable/repomix-output.xml b/flowable/repomix-output.xml new file mode 100644 index 00000000..ecb69cfa --- /dev/null +++ b/flowable/repomix-output.xml @@ -0,0 +1,5293 @@ +This file is a merged representation of a subset of the codebase, containing specifically included files, combined into a single document by Repomix. + + +This section contains a summary of this file. + + +This file contains a packed representation of a subset of the repository's contents that is considered the most important context. +It is designed to be easily consumable by AI systems for analysis, code review, +or other automated processes. + + + +The content is organized as follows: +1. This summary section +2. Repository information +3. Directory structure +4. Repository files (if enabled) +5. Multiple file entries, each consisting of: + - File path as an attribute + - Full contents of the file + + + +- This file should be treated as read-only. Any changes should be made to the + original repository files, not this packed version. +- When processing this file, use the file path to distinguish + between different files in the repository. +- Be aware that this file may contain sensitive information. Handle it with + the same level of security as you would the original repository. + + + +- Some files may have been excluded based on .gitignore rules and Repomix's configuration +- Binary files are not included in this packed representation. 
Please refer to the Repository Structure section for a complete list of file paths, including binary files +- Only files matching these patterns are included: **/src/main/java/**/*.java, **/src/main/resources/**/*.yml, **/pom.xml +- Files matching patterns in .gitignore are excluded +- Files matching default ignore patterns are excluded +- Files are sorted by Git change count (files with more changes are at the bottom) + + + + + +pom.xml +src/main/java/com/sdm/flowable/aop/StateGuard.java +src/main/java/com/sdm/flowable/aop/StateGuardAspect.java +src/main/java/com/sdm/flowable/config/RequestConfig.java +src/main/java/com/sdm/flowable/controller/ProcessController.java +src/main/java/com/sdm/flowable/dao/AsyncTaskRecordMapper.java +src/main/java/com/sdm/flowable/dao/ProcessNodeParamMapper.java +src/main/java/com/sdm/flowable/delegate/AsyncResultCheckDelegate.java +src/main/java/com/sdm/flowable/delegate/handler/CloudAppHandler.java +src/main/java/com/sdm/flowable/delegate/handler/DataProcessHandler.java +src/main/java/com/sdm/flowable/delegate/handler/ExecutionHandler.java +src/main/java/com/sdm/flowable/delegate/handler/ExportWordScriptHandler.java +src/main/java/com/sdm/flowable/delegate/handler/HpcHandler.java +src/main/java/com/sdm/flowable/delegate/handler/HttpHandler.java +src/main/java/com/sdm/flowable/delegate/handler/LocalAppHandler.java +src/main/java/com/sdm/flowable/delegate/UniversalDelegate.java +src/main/java/com/sdm/flowable/dto/ProcessDefinitionInfo.java +src/main/java/com/sdm/flowable/dto/req/CompleteTaskReq.java +src/main/java/com/sdm/flowable/dto/req/PreviewNodeInputFilesReq.java +src/main/java/com/sdm/flowable/dto/req/RetryRequest.java +src/main/java/com/sdm/flowable/dto/resp/NodeInputFilePreviewResp.java +src/main/java/com/sdm/flowable/entity/AsyncTaskRecord.java +src/main/java/com/sdm/flowable/entity/ProcessNodeParam.java +src/main/java/com/sdm/flowable/enums/AsyncTaskStatusEnum.java 
+src/main/java/com/sdm/flowable/enums/FlowElementTypeEnums.java +src/main/java/com/sdm/flowable/enums/NodeStateEnum.java +src/main/java/com/sdm/flowable/enums/OperationTypeEnum.java +src/main/java/com/sdm/flowable/enums/ProcessInstanceStateEnum.java +src/main/java/com/sdm/flowable/filter/AuthFilter.java +src/main/java/com/sdm/flowable/FlowableApplication.java +src/main/java/com/sdm/flowable/listener/RetryRedirectListener.java +src/main/java/com/sdm/flowable/listener/UserTaskDirectoryPreparationListener.java +src/main/java/com/sdm/flowable/process/Iprocess.java +src/main/java/com/sdm/flowable/process/ProcessService.java +src/main/java/com/sdm/flowable/service/IAsyncTaskRecordService.java +src/main/java/com/sdm/flowable/service/impl/AsyncTaskRecordServiceImpl.java +src/main/java/com/sdm/flowable/service/impl/NodeExecutionStrategy.java +src/main/java/com/sdm/flowable/service/impl/ProcessNodeParamServiceImpl.java +src/main/java/com/sdm/flowable/service/impl/ProcessStateHelper.java +src/main/java/com/sdm/flowable/service/IProcessNodeParamService.java +src/main/java/com/sdm/flowable/util/Dto2BpmnConverter.java +src/main/java/com/sdm/flowable/util/FlowNodeIdUtils.java +src/main/resources/application-dev-190.yml +src/main/resources/application-dev-65.yml +src/main/resources/application-local.yml +src/main/resources/application-lyric.yml +src/main/resources/application-prod.yml +src/main/resources/application.yml + + + +This section contains the contents of the repository's files. 
+ + + + + 4.0.0 + + com.sdm + SDM + 1.0-SNAPSHOT + + com.sdm + flowable + 0.0.1-SNAPSHOT + flowable + flowable + + + 17 + UTF-8 + UTF-8 + 3.3.5 + 7.0.1 + + + + + + org.springframework.boot + spring-boot-dependencies + ${spring-boot.version} + pom + import + + + + + + + + + com.sdm + common + 0.0.1-SNAPSHOT + + + * + * + + + + + + org.springframework.boot + spring-boot-starter + + + + org.springframework.boot + spring-boot-starter-web + + + + + org.flowable + flowable-spring-boot-starter + ${flowable.version} + + + + com.google.guava + guava + 31.1-jre + + + + + org.springframework.boot + spring-boot-starter-test + + + + org.aspectj + aspectjweaver + + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + + + + +package com.sdm.flowable.aop; + +import com.sdm.flowable.enums.OperationTypeEnum; +import java.lang.annotation.*; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@Documented +public @interface StateGuard { + + /** + * 当前接口对应的操作类型 + */ + OperationTypeEnum type(); + + /** + * 流程实例ID的参数名 (支持 SpEL) + * 例如: "#processInstanceId" 或 "#req.processInstanceId" + */ + String idParam() default "#processInstanceId"; +} + + + +package com.sdm.flowable.aop; + +import com.sdm.common.common.SdmResponse; + +import com.sdm.flowable.enums.OperationTypeEnum; +import com.sdm.flowable.enums.ProcessInstanceStateEnum; +import com.sdm.flowable.service.impl.ProcessStateHelper; +import lombok.extern.slf4j.Slf4j; +import org.aspectj.lang.ProceedingJoinPoint; +import org.aspectj.lang.annotation.Around; +import org.aspectj.lang.annotation.Aspect; +import org.aspectj.lang.reflect.MethodSignature; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.StandardReflectionParameterNameDiscoverer; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.expression.ExpressionParser; +import 
org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.stereotype.Component; +import org.springframework.util.StringUtils; + +import java.lang.reflect.Method; + +@Slf4j +@Aspect +@Component +public class StateGuardAspect { + + @Autowired + private ProcessStateHelper processStateHelper; + + private final ExpressionParser parser = new SpelExpressionParser(); + private final StandardReflectionParameterNameDiscoverer discoverer = new StandardReflectionParameterNameDiscoverer(); + + @Around("@annotation(stateGuard)") + public Object checkState(ProceedingJoinPoint point, StateGuard stateGuard) throws Throwable { + // 1. 获取 processInstanceId + String processInstanceId = getProcessInstanceId(point, stateGuard.idParam()); + + if (!StringUtils.hasText(processInstanceId)) { + throw new IllegalArgumentException("状态守卫拦截失败:无法获取流程实例ID"); + } + + // 2. 获取当前真实状态 + ProcessInstanceStateEnum currentState; + try { + currentState = processStateHelper.determineProcessState(processInstanceId); + } catch (IllegalArgumentException e) { + return SdmResponse.failed(e.getMessage()); + } + + // 3. 校验权限矩阵 + OperationTypeEnum operation = stateGuard.type(); + if (!operation.getAllowStates().contains(currentState)) { + String msg = String.format("非法操作!当前状态 [%s] 不允许执行 [%s] 操作。允许的状态为: %s", + currentState, operation, operation.getAllowStates()); + log.warn("状态机拦截: procId={}, {}", processInstanceId, msg); + return SdmResponse.failed(msg); + } + + // 4. 
校验通过,放行 + return point.proceed(); + } + + /** + * 利用 SpEL 解析参数值 + */ + private String getProcessInstanceId(ProceedingJoinPoint point, String keyExpression) { + MethodSignature signature = (MethodSignature) point.getSignature(); + Method method = signature.getMethod(); + Object[] args = point.getArgs(); + String[] paramNames = discoverer.getParameterNames(method); + + if (paramNames == null) return null; + + EvaluationContext context = new StandardEvaluationContext(); + for (int i = 0; i < paramNames.length; i++) { + context.setVariable(paramNames[i], args[i]); + } + + try { + Expression expression = parser.parseExpression(keyExpression); + return expression.getValue(context, String.class); + } catch (Exception e) { + log.error("SpEL解析流程ID失败: {}", keyExpression, e); + return null; + } + } +} + + + +package com.sdm.flowable.config; + +import com.sdm.flowable.filter.AuthFilter; +import org.springframework.boot.web.servlet.FilterRegistrationBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class RequestConfig { + @Bean + public AuthFilter authFilter() { + return new AuthFilter(); + } + + @Bean + public FilterRegistrationBean registrationBean() { + FilterRegistrationBean registration = new FilterRegistrationBean<>(); + registration.setFilter(authFilter()); + registration.setName("authFilter"); + registration.addUrlPatterns("/*"); + registration.setOrder(0); + return registration; + } +} + + + +package com.sdm.flowable.controller; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.sdm.common.common.SdmResponse; +import com.sdm.common.entity.flowable.dto.ProcessDefinitionDTO; +import com.sdm.common.entity.flowable.executeConfig.HPCExecuteConfig; +import com.sdm.common.entity.req.flowable.AsyncCallbackRequest; +import com.sdm.common.entity.resp.flowable.DeployFlowableResp; +import com.sdm.common.entity.resp.flowable.ProcessInstanceDetailResponse; +import 
com.sdm.common.entity.resp.flowable.ProcessInstanceResp; +import com.sdm.common.feign.inter.flowable.IFlowableFeignClient; +import com.sdm.flowable.aop.StateGuard; +import com.sdm.flowable.delegate.handler.HpcHandler; +import com.sdm.flowable.dto.req.CompleteTaskReq; +import com.sdm.flowable.dto.req.PreviewNodeInputFilesReq; +import com.sdm.flowable.dto.req.RetryRequest; +import com.sdm.flowable.dto.resp.NodeInputFilePreviewResp; +import com.sdm.flowable.enums.OperationTypeEnum; +import com.sdm.flowable.process.ProcessService; +import com.sdm.flowable.service.IProcessNodeParamService; +import lombok.extern.slf4j.Slf4j; +import org.flowable.bpmn.model.FlowElement; +import org.flowable.bpmn.model.FlowableListener; +import org.flowable.engine.delegate.DelegateExecution; +import org.flowable.engine.delegate.ReadOnlyDelegateExecution; +import org.flowable.engine.runtime.ProcessInstance; +import org.flowable.validation.ValidationError; +import org.flowable.variable.api.persistence.entity.VariableInstance; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import java.util.*; + +@Slf4j +@RestController +@RequestMapping("/process") +public class ProcessController implements IFlowableFeignClient { + @Autowired + private ProcessService processService; + + @Autowired + private IProcessNodeParamService processNodeParamService; + + @Autowired + private HpcHandler hpcHandler; + + private final ObjectMapper objectMapper = new ObjectMapper(); + + // 验证流程模型 + @PostMapping("/validate") + public Map validate(@RequestBody ProcessDefinitionDTO processDTO) { + Map result = new HashMap<>(); + + try { + List errors = processService.validateModel(processDTO); + if (errors.isEmpty()) { + result.put("valid", true); + result.put("message", "模型验证通过"); + } else { + result.put("valid", false); + result.put("errors", errors); + } + } catch (Exception e) { + result.put("valid", false); + result.put("message", "验证过程中发生错误: " + 
e.getMessage()); + } + + return result; + } + + + // 部署流程 + @PostMapping("/deploy") + public SdmResponse deploy(@RequestBody ProcessDefinitionDTO processDTO) { + try { + return processService.deploy(processDTO); + } catch (Exception e) { + log.error("流程部署失败: ", e); + return SdmResponse.failed("流程部署失败"); + } + } + + /** + * 删除所有流程部署 + */ + @GetMapping("/deleteAllDeployments") + public void deleteAllDeployments() { + log.info("开始删除所有流程部署"); + processService.deleteAllDeployments(); + } + + + // 保存节点用户输入参数(先基于流程定义ID,等流程启动后,保存流程实例ID,作为参数模板) + @PostMapping("/saveParamsByDefinitionId") + public SdmResponse saveParamsByDefinitionId(@RequestParam String processDefinitionId, @RequestParam String nodeId, @RequestParam String runId, + @RequestBody Map params) { + log.info("保存节点参数:processDefinitionId:{},nodeId:{}, runId:{},params:{}", processDefinitionId,nodeId, runId,params); + processNodeParamService.saveParamByProcessDefinitionId(processDefinitionId, nodeId, runId, params); + return SdmResponse.success(); + } + + // 启动流程实例后,更新流程参数的流程实例id + @PostMapping("/updateNodeParamProcessInstanceId") + public SdmResponse updateNodeParamProcessInstanceId(@RequestParam String runId, @RequestParam String processDefinitionId, @RequestParam String processInstanceId) { + log.info("更新节点参数流程实例ID:runId:{},processDefinitionId:{}, processInstanceId:{}", runId,processDefinitionId ,processInstanceId); + processNodeParamService.updateNodeParamProcessInstanceId(runId,processDefinitionId ,processInstanceId); + return SdmResponse.success(); + } + + /** + * 根据流程定义ID启动流程实例 + * + * @param processDefinitionId 流程定义ID(指定版本) + * @param variables 可选的流程启动变量 + */ + @PostMapping("/startByProcessDefinitionId") + public SdmResponse startByProcessDefinitionId( + @RequestParam String processDefinitionId, + @RequestBody(required = false) Map variables) { + log.info("开始启动流程定义: {}",processDefinitionId); + ProcessInstance processInstance = processService.startByProcessDefinitionId(processDefinitionId, variables); + 
ProcessInstanceResp processInstanceResp = new ProcessInstanceResp(); + processInstanceResp.setProcessInstanceId(processInstance.getId()); + processInstanceResp.setProcessDefinitionId(processInstance.getProcessDefinitionId()); + processInstanceResp.setProcessDefinitionKey(processInstance.getProcessDefinitionKey()); + processInstanceResp.setProcessDefinitionName(processInstance.getProcessDefinitionName()); + processInstanceResp.setProcessDefinitionVersion(processInstance.getProcessDefinitionVersion()); + processInstanceResp.setBusinessKey(processInstance.getBusinessKey()); + processInstanceResp.setStartUserId(processInstance.getStartUserId()); + processInstanceResp.setStartTime(processInstance.getStartTime()); + processInstanceResp.setSuspended(processInstance.isSuspended()); + return SdmResponse.success(processInstanceResp); + } + + + /** + * 挂起流程实例 + * @param processInstanceId 流程实例ID + * @return + */ + @GetMapping("/suspendProcessInstance") + @StateGuard(type = OperationTypeEnum.SUSPEND, idParam = "#processInstanceId") + SdmResponse suspendProcessInstance(@RequestParam String processInstanceId){ + return processService.suspendProcessInstance(processInstanceId); + } + + /** + * 激活流程实例 + * @param processInstanceId 流程实例ID + * @return + */ + @GetMapping("/activateProcessInstance") + @StateGuard(type = OperationTypeEnum.ACTIVATE, idParam = "#processInstanceId") + SdmResponse activateProcessInstance(@RequestParam String processInstanceId){ + return processService.activateProcessInstance(processInstanceId); + } + + /** + * 取消流程实例 + * @param processInstanceId 流程实例ID + * @return + */ + @GetMapping("/cancelProcessInstance") + @StateGuard(type = OperationTypeEnum.TERMINATE, idParam = "#processInstanceId") + SdmResponse cancelProcessInstance(@RequestParam String processInstanceId){ + return processService.cancelProcessInstance(processInstanceId); + } + + /** + * 查询流程实例及所有节点的详细状态(返回结构化 DTO) + * 如果只传了processDefinitionId,根据流程定义返回流程基本信息和节点信息, + * 
如果还传了processInstanceId,再封装流程状态和节点状态 + */ + @GetMapping("/getProcessAndNodeDetailByInstanceId") + public SdmResponse getProcessAndNodeDetailByInstanceId(@RequestParam String processDefinitionId,@RequestParam(required = false) String processInstanceId,@RequestParam(required = false) String runId) { + log.info("查询流程实例及所有节点的详细状态:processDefinitionId:{},processInstanceId:{},runId:{}", processDefinitionId,processInstanceId,runId); + return processService.getProcessAndNodeDetailByInstanceId(processDefinitionId,processInstanceId,runId); + } + + /** + * 预览节点输入文件列表 + * 逻辑: + * 1. 扫描当前节点 inputDirId (用户手动上传区) -> 全量返回 + * 2. 扫描前置节点 outputDirId (上游产出区) -> 根据 regex 过滤返回 + * 3. 统一封装为包含绝对路径的 DTO + */ + @PostMapping("/previewNodeInputFiles") + public SdmResponse> previewNodeInputFiles(@RequestBody PreviewNodeInputFilesReq previewNodeInputFilesReq) { + return processService.previewNodeInputFiles(previewNodeInputFilesReq); + } + + + + + /** + * 流程节点继续执行(完成人工节点/或者等待用户输入后继续手动执行的节点) + * + * @param req + * @return + */ + @PostMapping("/continueServiceTask") + @StateGuard(type = OperationTypeEnum.EXECUTE, idParam = "#req.processInstanceId") + public SdmResponse continueServiceTask(@RequestBody CompleteTaskReq req) { + return processService.continueServiceTask(req); + } + + /** + * 异步任务回调接口,用于唤醒等待的流程实例 + * + * @param request 包含异步任务ID和执行结果的请求对象 + */ + @PostMapping("/asyncCallback") + public SdmResponse asyncCallback(@RequestBody AsyncCallbackRequest request) { + log.info("开始处理异步回调请求: {}", request); + // 发送信号唤醒流程实例中等待的节点 + processService.asyncCallback(request); + return SdmResponse.success(); + } + + /** + * 重试任务,目前只能重试当前失败的节点 + */ + @PostMapping("/retryFailedNode") + @StateGuard(type = OperationTypeEnum.RETRY, idParam = "#processInstanceId") + public SdmResponse retryFailedNode(@RequestParam String processInstanceId, @RequestParam String failNodeId) { + log.info("开始重试任务: {}",failNodeId); + try { + processService.retryFailedNode(processInstanceId, failNodeId); + return 
SdmResponse.success("重试任务已提交"); + } catch (Exception e) { + return SdmResponse.failed("重试失败"); + } + } + /** + * 用户点击"重试"按钮,传入目标节点ID ,可以从任意节点重试 + * + * @param request 重试请求参数,包括流程实例ID、目标节点ID和新变量 + * @return 重试结果 + */ + @PostMapping("/retryToNode") + @StateGuard(type = OperationTypeEnum.RETRY, idParam = "#request.procInstId") + public SdmResponse retryToNode(@RequestBody RetryRequest request) { + log.info("开始重试任务: {}", request); + try { + processService.retryToNode(request.getProcInstId(), request.getTargetNodeId(), request.getVariables()); + return SdmResponse.success("重试任务已提交"); + } catch (Exception e) { + return SdmResponse.failed("重试失败: " + e.getMessage()); + } + } + + // mock验证HPC流程使用 + @PostMapping("/testHpc") + public String testHpc(@RequestBody Map params) { + String beforeNodeId = params.get("beforeNodeId").toString(); + HPCExecuteConfig config=new HPCExecuteConfig(); + config.setBeforeNodeId(beforeNodeId); + if(!Objects.isNull(params.get("masterFileRegularStr"))){ + config.setMasterFileRegularStr(params.get("masterFileRegularStr").toString()); + } + if(!Objects.isNull(params.get("inputFilesRegularStr"))){ + config.setInputFilesRegularStr(params.get("inputFilesRegularStr").toString()); + } + String currentNodeId = params.get("currentNodeId").toString(); + DelegateExecution execution = new DelegateExecution() { + @Override + public String getId() { + return ""; + } + + @Override + public String getProcessInstanceId() { + return ""; + } + + @Override + public String getRootProcessInstanceId() { + return ""; + } + + @Override + public String getEventName() { + return ""; + } + + @Override + public void setEventName(String eventName) { + + } + + @Override + public String getProcessInstanceBusinessKey() { + return ""; + } + + @Override + public String getProcessInstanceBusinessStatus() { + return ""; + } + + @Override + public String getProcessDefinitionId() { + return ""; + } + + @Override + public String getPropagatedStageInstanceId() { + return ""; + } + + 
@Override + public String getParentId() { + return ""; + } + + @Override + public String getSuperExecutionId() { + return ""; + } + + @Override + public String getCurrentActivityId() { + return currentNodeId; + } + + @Override + public String getTenantId() { + return ""; + } + + @Override + public FlowElement getCurrentFlowElement() { + return null; + } + + @Override + public void setCurrentFlowElement(FlowElement flowElement) { + + } + + @Override + public FlowableListener getCurrentFlowableListener() { + return null; + } + + @Override + public void setCurrentFlowableListener(FlowableListener currentListener) { + + } + + @Override + public ReadOnlyDelegateExecution snapshotReadOnly() { + return null; + } + + @Override + public DelegateExecution getParent() { + return null; + } + + @Override + public List getExecutions() { + return List.of(); + } + + @Override + public void setActive(boolean isActive) { + + } + + @Override + public boolean isActive() { + return false; + } + + @Override + public boolean isEnded() { + return false; + } + + @Override + public void setConcurrent(boolean isConcurrent) { + + } + + @Override + public boolean isConcurrent() { + return false; + } + + @Override + public boolean isProcessInstanceType() { + return false; + } + + @Override + public void inactivate() { + + } + + @Override + public boolean isScope() { + return false; + } + + @Override + public void setScope(boolean isScope) { + + } + + @Override + public boolean isMultiInstanceRoot() { + return false; + } + + @Override + public void setMultiInstanceRoot(boolean isMultiInstanceRoot) { + + } + + @Override + public Map getVariables() { + return Map.of(); + } + + @Override + public Map getVariableInstances() { + return Map.of(); + } + + @Override + public Map getVariables(Collection variableNames) { + return Map.of(); + } + + @Override + public Map getVariableInstances(Collection variableNames) { + return Map.of(); + } + + @Override + public Map getVariables(Collection variableNames, 
boolean fetchAllVariables) { + return Map.of(); + } + + @Override + public Map getVariableInstances(Collection variableNames, boolean fetchAllVariables) { + return Map.of(); + } + + @Override + public Map getVariablesLocal() { + return Map.of(); + } + + @Override + public Map getVariableInstancesLocal() { + return Map.of(); + } + + @Override + public Map getVariablesLocal(Collection variableNames) { + return Map.of(); + } + + @Override + public Map getVariableInstancesLocal(Collection variableNames) { + return Map.of(); + } + + @Override + public Map getVariablesLocal(Collection variableNames, boolean fetchAllVariables) { + return Map.of(); + } + + @Override + public Map getVariableInstancesLocal(Collection variableNames, boolean fetchAllVariables) { + return Map.of(); + } + + @Override + public Object getVariable(String variableName) { + return null; + } + + @Override + public VariableInstance getVariableInstance(String variableName) { + return null; + } + + @Override + public Object getVariable(String variableName, boolean fetchAllVariables) { + return null; + } + + @Override + public VariableInstance getVariableInstance(String variableName, boolean fetchAllVariables) { + return null; + } + + @Override + public Object getVariableLocal(String variableName) { + return null; + } + + @Override + public VariableInstance getVariableInstanceLocal(String variableName) { + return null; + } + + @Override + public Object getVariableLocal(String variableName, boolean fetchAllVariables) { + return null; + } + + @Override + public VariableInstance getVariableInstanceLocal(String variableName, boolean fetchAllVariables) { + return null; + } + + @Override + public T getVariable(String variableName, Class variableClass) { + return null; + } + + @Override + public T getVariableLocal(String variableName, Class variableClass) { + return null; + } + + @Override + public Set getVariableNames() { + return Set.of(); + } + + @Override + public Set getVariableNamesLocal() { + return 
Set.of(); + } + + @Override + public void setVariable(String variableName, Object value) { + + } + + @Override + public void setVariable(String variableName, Object value, boolean fetchAllVariables) { + + } + + @Override + public Object setVariableLocal(String variableName, Object value) { + return null; + } + + @Override + public Object setVariableLocal(String variableName, Object value, boolean fetchAllVariables) { + return null; + } + + @Override + public void setVariables(Map variables) { + + } + + @Override + public void setVariablesLocal(Map variables) { + + } + + @Override + public boolean hasVariables() { + return false; + } + + @Override + public boolean hasVariablesLocal() { + return false; + } + + @Override + public boolean hasVariable(String variableName) { + return false; + } + + @Override + public boolean hasVariableLocal(String variableName) { + return false; + } + + @Override + public void removeVariable(String variableName) { + + } + + @Override + public void removeVariableLocal(String variableName) { + + } + + @Override + public void removeVariables(Collection variableNames) { + + } + + @Override + public void removeVariablesLocal(Collection variableNames) { + + } + + @Override + public void removeVariables() { + + } + + @Override + public void removeVariablesLocal() { + + } + + @Override + public void setTransientVariable(String variableName, Object variableValue) { + + } + + @Override + public void setTransientVariableLocal(String variableName, Object variableValue) { + + } + + @Override + public void setTransientVariables(Map transientVariables) { + + } + + @Override + public Object getTransientVariable(String variableName) { + return null; + } + + @Override + public Map getTransientVariables() { + return Map.of(); + } + + @Override + public void setTransientVariablesLocal(Map transientVariables) { + + } + + @Override + public Object getTransientVariableLocal(String variableName) { + return null; + } + + @Override + public Map 
getTransientVariablesLocal() { + return Map.of(); + } + + @Override + public void removeTransientVariableLocal(String variableName) { + + } + + @Override + public void removeTransientVariable(String variableName) { + + } + + @Override + public void removeTransientVariables() { + + } + + @Override + public void removeTransientVariablesLocal() { + + } + }; + hpcHandler.execute(execution,params,config); + return "ok"; + } + + +} + + + +package com.sdm.flowable.dao; + +import com.sdm.flowable.entity.AsyncTaskRecord; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + *

+ * 异步任务执行记录表 Mapper 接口 + *

+ * + * @author author + * @since 2025-11-26 + */ +public interface AsyncTaskRecordMapper extends BaseMapper { + +} +
+ + +package com.sdm.flowable.dao; + +import com.sdm.flowable.entity.ProcessNodeParam; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + *

+ * 流程节点输入参数表 Mapper 接口 + *

+ * + * @author author + * @since 2025-11-25 + */ +public interface ProcessNodeParamMapper extends BaseMapper { + +} +
+ + +package com.sdm.flowable.delegate; + +import org.flowable.engine.delegate.DelegateExecution; +import org.flowable.engine.delegate.JavaDelegate; +import org.springframework.stereotype.Component; + +import static com.sdm.common.config.FlowableConfig.RECEIVETASK_CALLBACKE_MSG; +import static com.sdm.common.config.FlowableConfig.RECEIVETASK_CALLBACKE_STATUS; + +/** + * 异步结果校验 Delegate + */ +@Component("asyncResultCheckDelegate") +public class AsyncResultCheckDelegate implements JavaDelegate { + + @Override + public void execute(DelegateExecution execution) { + // 1. 从 Local 变量中获取状态 + // Flowable 的 getVariable 会自动查找 Local -> Parent,所以 setVariableLocal 后这里能取到 + String status = (String) execution.getVariable(RECEIVETASK_CALLBACKE_STATUS); + String msg = (String) execution.getVariable(RECEIVETASK_CALLBACKE_MSG); + + // 防御性检查:如果变量丢失(极低概率,除非手动操作了数据库),默认认为是 SUCCESS 防止卡死,或者抛错 + if (status == null) { + // 这里的策略取决于业务:是让它通过还是报错? + // 工业场景建议报错,宁可误报不可漏报 + throw new RuntimeException("系统异常:未获取到HPC执行状态,请联系管理员检查 AsyncResultCheckDelegate"); + } + + // 2. 核心校验逻辑 + if ("FAIL".equals(status)) { + // 【自爆】抛出 RuntimeException + // Flowable 会捕获此异常 -> 减少重试次数 -> 最终移动到 DeadLetterJob 表 -> 节点变红 + String errorInfo = (msg != null && !msg.isEmpty()) ? msg : "外部系统返回失败状态,未提供详细信息"; + throw new RuntimeException("HPC任务执行失败: " + errorInfo); + } + + // 3. 
SUCCESS 的情况 + // 什么都不做,Flowable 会自动结束当前 ServiceTask,流转到下一个节点 + // 可选:清理 Local 变量保持环境干净 + execution.removeVariableLocal(RECEIVETASK_CALLBACKE_STATUS); + execution.removeVariableLocal(RECEIVETASK_CALLBACKE_MSG); + } +} + + + +package com.sdm.flowable.delegate.handler; + +import com.sdm.common.entity.flowable.executeConfig.CloudAppExecuteConfig; +import org.flowable.engine.delegate.DelegateExecution; +import org.springframework.stereotype.Component; + +import java.util.Map; + +// 云应用处理器(executeType=cloudApp) +@Component("cloudApp") +public class CloudAppHandler implements ExecutionHandler,CloudAppExecuteConfig> { + @Override + public void execute(DelegateExecution execution, Map params, CloudAppExecuteConfig config) { + // 实现云应用处理逻辑... + } +} + + + +package com.sdm.flowable.delegate.handler; + +import com.sdm.common.entity.flowable.executeConfig.BaseExecuteConfig; +import com.sdm.common.entity.flowable.executeConfig.DataProcessExecuteConfig; +import org.flowable.engine.delegate.DelegateExecution; +import org.springframework.stereotype.Component; + +import java.util.Map; + +// 数据处理执行器(executeType=data_process) +@Component("dataProcess") +public class DataProcessHandler implements ExecutionHandler,DataProcessExecuteConfig> { + @Override + public void execute(DelegateExecution execution, Map params, DataProcessExecuteConfig config) { + // 实现数据处理逻辑... 
+ } +} + + + +package com.sdm.flowable.delegate.handler; + +import com.sdm.common.entity.flowable.executeConfig.BaseExecuteConfig; +import org.flowable.engine.delegate.DelegateExecution; + +import java.util.Map; + +public interface ExecutionHandler { + void execute(DelegateExecution execution, P params, T config); +} + + + +package com.sdm.flowable.delegate.handler; + +import com.alibaba.fastjson2.JSONObject; +import com.sdm.common.common.SdmResponse; +import com.sdm.common.common.ThreadLocalContext; +import com.sdm.common.entity.flowable.executeConfig.ExportWordScriptExecuteConfig; +import com.sdm.common.entity.req.data.GetFileBaseInfoReq; +import com.sdm.common.entity.req.data.UploadFilesReq; +import com.sdm.common.entity.req.project.ProjecInfoReq; +import com.sdm.common.entity.req.project.SimulationPerformance; +import com.sdm.common.entity.req.project.SpdmReportReq; +import com.sdm.common.entity.resp.data.FileMetadataInfoResp; +import com.sdm.common.entity.resp.task.PerformanceResp; +import com.sdm.common.feign.inter.data.IDataFeignClient; +import com.sdm.common.feign.inter.project.ISimulationRunFeignClient; +import com.sdm.common.feign.inter.task.ISimuluationPerformanceFeignClient; +import com.sdm.common.config.FlowableConfig; +import com.sdm.flowable.entity.ProcessNodeParam; +import com.sdm.flowable.service.IProcessNodeParamService; +import lombok.extern.slf4j.Slf4j; +import org.flowable.engine.delegate.DelegateExecution; +import org.springframework.beans.BeanUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.web.multipart.MultipartFile; +import org.springframework.mock.web.MockMultipartFile; + +import java.io.*; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +/** + * @Description: 生成自动化报告脚本处理器 + * @Author: shiman + * @Date: 2023-03-05 23:09 + */ +@Slf4j 
@Component("exportWordScript")
public class ExportWordScriptHandler implements ExecutionHandler<Map<String, Object>, ExportWordScriptExecuteConfig> {

    @Autowired
    private IDataFeignClient dataFeignClient;

    @Autowired
    private IProcessNodeParamService processNodeParamService;

    @Autowired
    private ISimuluationPerformanceFeignClient simuluationPerformanceFeignClient;

    @Autowired
    private ISimulationRunFeignClient simulationRunFeignClient;

    /**
     * Generates the automated Word report for the run bound to this process instance
     * and uploads the resulting document to MinIO under the node's output directory.
     * Any failure is logged and re-thrown as a RuntimeException so Flowable marks
     * the node as failed.
     */
    @Override
    public void execute(DelegateExecution execution, Map<String, Object> params, ExportWordScriptExecuteConfig config) {
        try {
            // Framework-level config: upstream node id and file-filter regex.
            String beforeNodeId = config.getBeforeNodeId();
            String currentNodeId = execution.getCurrentActivityId();
            String fileRegularStr = config.getFileRegularStr();

            // Process-instance variables populated when the flow was started.
            String runId = (String) execution.getVariable("runId");
            Long userId = (Long) execution.getVariable("userId");
            String userName = (String) execution.getVariable("userName");
            Long tenantId = (Long) execution.getVariable("tenantId");
            // Propagate identity into the thread context so downstream Feign calls carry it.
            ThreadLocalContext.setUserId(userId);
            ThreadLocalContext.setUserName(userName);
            ThreadLocalContext.setTenantId(tenantId);
            String processDefinitionId = execution.getProcessDefinitionId();
            log.info("ExportWordScriptHandler 开始执行 runId:{},userId:{},userName:{},tenantId:{},processDefinitionId:{}, beforeNodeId:{}, currentNodeId:{},fileRegularStr:{}", runId, userId, userName, tenantId, processDefinitionId, beforeNodeId, currentNodeId, fileRegularStr);

            ProjecInfoReq projecInfoReq = buildprojectInfoReq(params);
            log.info("ExportWordScriptHandler的请求参数 projectInfoReq:{}", projecInfoReq);

            // Fetch the run's performance indicators for the report body.
            SdmResponse<List<PerformanceResp>> runPerformance = simuluationPerformanceFeignClient.getRunPerformance(runId);
            if (!runPerformance.isSuccess()) {
                log.error("获取算列性能指标失败");
                throw new RuntimeException("获取算列性能指标失败");
            }
            List<SimulationPerformance> performanceList = new ArrayList<>();
            for (PerformanceResp datum : runPerformance.getData()) {
                SimulationPerformance performance = new SimulationPerformance();
                BeanUtils.copyProperties(datum, performance);
                performanceList.add(performance);
            }
            log.info("ExportWordScriptHandler的返回参数 runPerformance:{}", runPerformance);

            // Key result file ids whose images are embedded into the report.
            // NOTE(review): element type reconstructed as Long (file ids are Long elsewhere) — confirm.
            SdmResponse<List<Long>> simulationKeyResultFileIds = simulationRunFeignClient.getSimulationKeyResultFileIds(runId);
            if (!simulationKeyResultFileIds.isSuccess()) {
                log.error("获取算列关键结果文件失败");
                throw new RuntimeException("获取算列关键结果文件失败");
            }
            log.info("ExportWordScriptHandler的返回参数 simulationKeyResultFileIds:{}", simulationKeyResultFileIds);

            // Load this node's persisted configuration for the current process definition.
            ProcessNodeParam currentProcessNodeParam = processNodeParamService.lambdaQuery()
                    .eq(ProcessNodeParam::getRunId, runId)
                    .eq(ProcessNodeParam::getNodeId, currentNodeId)
                    .eq(ProcessNodeParam::getProcessDefinitionId, processDefinitionId)
                    .one();

            // Resolve the node's output directory to an absolute path on the shared base dir.
            String currentNodeParamJson = currentProcessNodeParam.getParamJson();
            JSONObject currentParamJsonObject = JSONObject.parseObject(currentNodeParamJson);
            Long currentNodeOutputDirId = currentParamJsonObject.getLong("outputDirId");
            FileMetadataInfoResp currentNodeFileMetadataInfoResp = getFileBaseInfo(currentNodeOutputDirId);
            String currentNodeObjectKey = currentNodeFileMetadataInfoResp.getObjectKey();
            log.info("当前节点配置参数:{}", currentNodeParamJson);
            String currentNodeOutputDirPath = FlowableConfig.FLOWABLE_SIMULATION_BASEDIR + currentNodeObjectKey;
            log.info("当前节点输出文件夹:{}", currentNodeOutputDirPath);

            // Ask the run service to render the report into the node's output directory.
            SpdmReportReq req = new SpdmReportReq();
            req.setProjecInfoReq(projecInfoReq);
            req.setOutPutDirPath(currentNodeOutputDirPath);
            req.setImageFileIdList(simulationKeyResultFileIds.getData());
            req.setPerformanceList(performanceList);
            SdmResponse<Void> voidSdmResponse = simulationRunFeignClient.generateReportInternal(req);
            if (!voidSdmResponse.isSuccess()) {
                log.error("生成自动化报告失败");
                throw new RuntimeException("生成自动化报告失败");
            }
            try {
                // NOTE(review): assumes currentNodeOutputDirPath ends with a path separator — confirm.
                String reportPath = currentNodeOutputDirPath + "report.docx";
                log.info("报告路径:{}", reportPath);
                // Pick up the report the script produced and push it to MinIO.
                uploadResultFileToMinio(reportPath, currentNodeOutputDirId);
            } catch (Exception ex) {
                log.error("生成自动化报告失败:{}", ex.getMessage(), ex);
                throw new RuntimeException("生成自动化报告失败");
            }

        } catch (Exception e) {
            log.error("执行ExportWordScript失败", e);
            throw new RuntimeException("执行ExportWordScript失败: " + e.getMessage(), e);
        }
    }

    /** Maps the flat params map onto the report-header request object. */
    private static ProjecInfoReq buildprojectInfoReq(Map<String, Object> params) {
        ProjecInfoReq projectInfoReq = new ProjecInfoReq();
        projectInfoReq.setDepartment((String) params.get("department"));
        projectInfoReq.setApplicants((String) params.get("applicants"));
        projectInfoReq.setDate((String) params.get("date"));
        projectInfoReq.setProjectNum((String) params.get("projectNum"));
        projectInfoReq.setWorkspaceNum((String) params.get("workspaceNum"));
        projectInfoReq.setWorkspace((String) params.get("workspace"));
        projectInfoReq.setTaskType((String) params.get("taskType"));
        projectInfoReq.setReportVer((String) params.get("reportVer"));
        projectInfoReq.setFileNum((String) params.get("fileNum"));
        projectInfoReq.setFormulateTime((String) params.get("formulateTime"));
        projectInfoReq.setCheckTime((String) params.get("checkTime"));
        projectInfoReq.setApproveTime((String) params.get("approveTime"));
        projectInfoReq.setIsBatch(Boolean.parseBoolean((String) params.get("isBatch")));
        projectInfoReq.setLoadcaseName((String) params.get("loadcaseName"));
        projectInfoReq.setReportCommand((String) params.get("reportCommand"));
        return projectInfoReq;
    }

    /**
     * Fetches file metadata for the given directory id via the data service.
     *
     * @throws RuntimeException when the remote call fails or returns no data
     */
    private FileMetadataInfoResp getFileBaseInfo(Long outputDirId) {
        GetFileBaseInfoReq getFileBaseInfoReq = new GetFileBaseInfoReq();
        getFileBaseInfoReq.setFileId(outputDirId);
        SdmResponse<FileMetadataInfoResp> fileBaseInfoResp = dataFeignClient.getFileBaseInfo(getFileBaseInfoReq);
        if (!fileBaseInfoResp.isSuccess() || fileBaseInfoResp.getData() == null) {
            log.warn("getFileBaseInfo failed, outputDirId:{}", outputDirId);
            throw new RuntimeException("上一节点信息查询失败");
        }
        return fileBaseInfoResp.getData();
    }

    /**
     * Uploads the generated result file into the given MinIO directory.
     * Missing files are logged and skipped (best-effort); upload failures throw.
     */
    private void uploadResultFileToMinio(String resultFilePath, Long currentNodeOutputDirId) {
        try {
            File resultFile = new File(resultFilePath);
            if (!resultFile.exists()) {
                log.warn("结果文件不存在: {}", resultFilePath);
                return;
            }

            // Wrap the on-disk file as a MultipartFile for the upload API.
            // NOTE(review): content type "application/json" looks wrong for a .docx — confirm intent.
            MultipartFile multipartFile = new MockMultipartFile(
                    resultFile.getName(),
                    resultFile.getName(),
                    "application/json",
                    Files.readAllBytes(resultFile.toPath())
            );

            UploadFilesReq req = new UploadFilesReq();
            req.setDirId(currentNodeOutputDirId);
            req.setFile(multipartFile);
            log.info("上传文件参数:{}", req);
            SdmResponse<?> sdmResponse = dataFeignClient.uploadFiles(req);
            if (!sdmResponse.isSuccess()) {
                log.error("上传文件失败: {}", sdmResponse.getMessage());
                throw new RuntimeException("上传文件失败:");
            }
            log.info("结果文件已上传到MinIO: {}", resultFilePath);
        } catch (Exception e) {
            log.error("上传结果文件到MinIO失败: {}", resultFilePath, e);
            throw new RuntimeException("上传结果文件到MinIO失败: " + e.getMessage(), e);
        }
    }

    /**
     * Copies files from the source directory to the target directory,
     * optionally filtered by a (case-insensitive) file-name regex.
     *
     * @param sourcePath     source directory path
     * @param targetPath     target directory path
     * @param fileRegularStr regex to filter file names; null/empty copies everything
     */
    private void copyFilesWithRegex(String sourcePath, String targetPath, String fileRegularStr) {
        try {
            File sourceDir = new File(sourcePath);
            File targetDir = new File(targetPath);
            log.info("复制文件,从 {} 到 {}", sourcePath, targetPath);

            if (!sourceDir.exists() || !sourceDir.isDirectory()) {
                log.warn("源目录不存在或不是一个目录: {}", sourcePath);
                return;
            }

            if (!targetDir.exists()) {
                log.info("目标目录不存在,创建目录: {}", targetPath);
                targetDir.mkdirs();
            }

            // Compile the filter regex once, if one was provided.
            Pattern pattern = null;
            if (fileRegularStr != null && !fileRegularStr.isEmpty()) {
                try {
                    pattern = Pattern.compile(fileRegularStr, Pattern.CASE_INSENSITIVE);
                } catch (Exception e) {
                    throw new RuntimeException("无效的正则表达式: " + fileRegularStr, e);
                }
            }

            // listFiles() may return null on I/O error — check BEFORE dereferencing
            // (the previous version logged files.length first, which could NPE).
            File[] files = sourceDir.listFiles();
            if (files == null) {
                log.warn("无法读取源目录: {}", sourcePath);
                return;
            }
            log.info("源目录中的文件数量: {}", files.length);
            for (File file : files) {
                if (file.isFile()) {
                    log.info("开始判断文件:{} 是否符合当前正则: {}", file.getName(), fileRegularStr);
                    if (pattern != null && !pattern.matcher(file.getName()).matches()) {
                        log.info("文件:{} 不符合当前正则表达式,跳过", file.getName());
                        continue;
                    }
                    log.info("文件:{} 符合当前正则表达式,开始复制", file.getName());
                    copyFile(file, new File(targetDir, file.getName()));
                }
            }
        } catch (Exception e) {
            log.error("复制文件失败,从 {} 到 {}: {}", sourcePath, targetPath, e.getMessage(), e);
            throw new RuntimeException("复制文件失败: " + e.getMessage(), e);
        }
    }

    /**
     * Copies a single file with a buffered stream loop; streams are closed
     * via try-with-resources.
     *
     * @throws IOException on any I/O failure during the copy
     */
    private void copyFile(File sourceFile, File targetFile) throws IOException {
        try (InputStream inputStream = new FileInputStream(sourceFile);
             FileOutputStream outputStream = new FileOutputStream(targetFile)) {

            byte[] buffer = new byte[8192];
            int bytesRead;
            while ((bytesRead = inputStream.read(buffer)) != -1) {
                outputStream.write(buffer, 0, bytesRead);
            }
        }
    }
}



package com.sdm.flowable.delegate.handler;

import com.alibaba.fastjson2.JSONObject;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.entity.flowable.executeConfig.HPCExecuteConfig;
import com.sdm.common.entity.req.data.GetFileBaseInfoReq;
import com.sdm.common.entity.req.pbs.SubmitHpcTaskRemoteReq;
import com.sdm.common.entity.resp.data.FileMetadataInfoResp;
import com.sdm.common.feign.inter.data.IDataFeignClient;
import com.sdm.common.feign.inter.pbs.ITaskFeignClient;
import com.sdm.common.log.CoreLogger;
import com.sdm.common.config.FlowableConfig;
import com.sdm.flowable.entity.ProcessNodeParam;
import com.sdm.flowable.enums.AsyncTaskStatusEnum;
import com.sdm.flowable.service.IAsyncTaskRecordService;
import com.sdm.flowable.service.IProcessNodeParamService;
+import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.flowable.engine.delegate.DelegateExecution; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.stream.Collectors; + +// HPC(executeType=HPC) +@Slf4j +@Component("HPC") +public class HpcHandler implements ExecutionHandler,HPCExecuteConfig> { + + @Autowired + private IAsyncTaskRecordService asyncTaskRecordService; + + @Autowired + private ITaskFeignClient taskFeignClient; + + @Autowired + private IProcessNodeParamService processNodeParamService; + + @Autowired + private IDataFeignClient dataFeignClient; + + /* + * params:业务参数 + * config:框架属性 + * */ + @Override + public void execute(DelegateExecution execution, Map params, HPCExecuteConfig config) { + CoreLogger.info("hpc process excute,params:{},config:{}",JSONObject.toJSONString(params),JSONObject.toJSONString(config)); + SubmitHpcTaskRemoteReq submitHpcTaskRemoteReq = convertParamsToReq(params); + String beforeNodeId = config.getBeforeNodeId(); + String currentNodeId =execution.getCurrentActivityId(); + String masterFileRegularStr = config.getMasterFileRegularStr(); + String inputFilesRegularStr = config.getInputFilesRegularStr(); + CoreLogger.info("beforeNodeId:{},currentNodeId:{},masterFileRegularStr:{},inputFilesRegularStr:{}",beforeNodeId,currentNodeId,masterFileRegularStr,inputFilesRegularStr); + + // params 取只是测试使用 + String processDefinitionId = (execution==null||StringUtils.isBlank(execution.getProcessDefinitionId()))? + params.get("processDefinitionId").toString():execution.getProcessDefinitionId(); + // params 取只是测试使用 + String processInstanceId = (execution==null||StringUtils.isBlank(execution.getProcessInstanceId()))? 
+ params.get("processInstanceId").toString():execution.getProcessInstanceId(); + + submitHpcTaskRemoteReq.setMasterFileRegularStr(masterFileRegularStr); + submitHpcTaskRemoteReq.setInputFilesRegularStr(inputFilesRegularStr); + CoreLogger.info("hpc executeMode:{}",params.get("executeMode")); + String executeMode = params.get("executeMode").toString(); + if(StringUtils.isBlank(executeMode)|| + (!Objects.equals(executeMode,FlowableConfig.EXECUTE_MODE_AUTO)&& + !Objects.equals(executeMode,FlowableConfig.EXECUTE_MODE_MANUAL))){ + throw new RuntimeException("hpc executeMode illegal"); + } + // 处理hpc求解文件路径 + dealHpcFile(submitHpcTaskRemoteReq,beforeNodeId,currentNodeId, processDefinitionId,processInstanceId,executeMode,params); + // 实现HPC处理逻辑... + // INIT(初始化)/RUNNING(执行中)/SUCCESS(执行成功)/FAIL(执行失败) + String status = AsyncTaskStatusEnum.INIT.getCode(); + // 1. 调用 HPC 平台提交任务 + SdmResponse submitResp = taskFeignClient.adapterSubmitHpcJob(submitHpcTaskRemoteReq); + if(!submitResp.isSuccess()|| StringUtils.isBlank(submitResp.getData())){ + log.error("HpcHandler submit failed,jobName:{}",params); + throw new RuntimeException("HpcHandler submit failed,"+submitResp.getMessage()); + } + String hpcTaskId = submitResp.getData(); + CoreLogger.info("hpc task submit succ jobId:{}",hpcTaskId); + // 2. 
存数据库(提交状态 + 外部任务ID) + asyncTaskRecordService.registerAsyncTask( + execution, + config.getCallbackNodeId(), // ReceiveTask ID + "HPC", // handlerType + new HashMap<>(), + status, + hpcTaskId + ); + + log.info("HPC 任务 {} 已提交", "hpcTaskId"); + } + + private void dealHpcFile(SubmitHpcTaskRemoteReq submitHpcTaskRemoteReq, String beforeNodeId, String currentNodeId, + String processDefinitionId, String processInstanceId, String executeMode,Map params) { + String simulationBaseDir = FlowableConfig.FLOWABLE_SIMULATION_BASEDIR; + // 查询前节点的工作目录---》本地磁盘对应目录 + // 查询前节点和当前节点的工作目录---》本地磁盘对应目录 + List processNodeParams = processNodeParamService.lambdaQuery() + .in(ProcessNodeParam::getNodeId, beforeNodeId, currentNodeId) // 使用 in 条件,匹配 beforeNodeId 或 currentNodeId + .eq(ProcessNodeParam::getProcessDefinitionId, processDefinitionId) + .eq(ProcessNodeParam::getProcessInstanceId, processInstanceId) + .orderByDesc(ProcessNodeParam::getUpdateTime) + .list(); + + Map> nodeParamMap = processNodeParams.stream() + .collect(Collectors.groupingBy(ProcessNodeParam::getNodeId)); + List beforeNodeParams = nodeParamMap.get(beforeNodeId); + List currentNodeParams = nodeParamMap.get(currentNodeId); + if(CollectionUtils.isEmpty(beforeNodeParams) || CollectionUtils.isEmpty(currentNodeParams)){ + throw new RuntimeException("未获取到当前节点或者求解文件节点信息"); + } + // 自动,前一个节点 + submitHpcTaskRemoteReq.setExecuteMode(executeMode); + if(Objects.equals(executeMode,FlowableConfig.EXECUTE_MODE_AUTO)){ + ProcessNodeParam beforeNode = beforeNodeParams.get(0); + String beforeNodeJectKey = getNodeObjectKey(beforeNode); + // 本地求解文件路径 taskLocalBaseDir + submitHpcTaskRemoteReq.setSimulationFileLocalPath(simulationBaseDir + beforeNodeJectKey); + CoreLogger.info("simulationFileLocalPath :{} ",simulationBaseDir + beforeNodeJectKey); + } + // 手动上传的 + if (Objects.equals(executeMode,FlowableConfig.EXECUTE_MODE_MANUAL)) { + List masterFilePaths = getFileListFromMap(params, "masterFileRegularStr"); + List inPutFilePaths = 
getFileListFromMap(params, "inputFilesRegularStr"); + if(CollectionUtils.isEmpty(masterFilePaths)||CollectionUtils.isEmpty(inPutFilePaths)){ + CoreLogger.warn("hpc executeMode manual,filepath illegal"); + throw new RuntimeException("hpc executeMode manual,filepath illegal"); + } + submitHpcTaskRemoteReq.setManualMasterFilepaths(masterFilePaths); + submitHpcTaskRemoteReq.setManualInputFilePaths(inPutFilePaths); + } + // hpc 节点回传路径 + ProcessNodeParam currentNode = currentNodeParams.get(0); + String currentNodeJectKey = getNodeObjectKey(currentNode); + // hpc 回传文件路径 + submitHpcTaskRemoteReq.setStdoutSpdmNasFilePath(simulationBaseDir + currentNodeJectKey); + CoreLogger.info("stdoutSpdmNasFilePath :{} ",simulationBaseDir + currentNodeJectKey); + } + + /** + * 通用方法:从 Map 中提取 explicitInputFiles 下的指定文件列表 + * @param dataMap 根 Map + * @param key 要提取的列表名称(masterFileRegularStr/inputFilesRegularStr) + * @return 字符串列表,不存在则返回空列表(避免 NPE) + */ + @SuppressWarnings("unchecked") + private static List getFileListFromMap(Map dataMap, String key) { + return Optional.ofNullable(dataMap) + // 提取 explicitInputFiles 子 Map + .map(map -> (Map) map.get("explicitInputFiles")) + // 提取指定 key 的列表 + .map(explicitMap -> (List) explicitMap.get(key)) + // 为空则返回空列表,避免后续遍历 NPE + .orElse(List.of()); + } + + + private String getNodeObjectKey(ProcessNodeParam processNodeParam){ + String paramJson = processNodeParam.getParamJson(); + JSONObject paramJsonObject = JSONObject.parseObject(paramJson); + // outputDirId + Long outputDirId = paramJsonObject.getLong("outputDirId"); + // 查data表 + GetFileBaseInfoReq getFileBaseInfoReq = new GetFileBaseInfoReq(); + getFileBaseInfoReq.setFileId(outputDirId); + SdmResponse fileBaseInfoResp = dataFeignClient.getFileBaseInfo(getFileBaseInfoReq); + if(!fileBaseInfoResp.isSuccess()||fileBaseInfoResp.getData()==null){ + CoreLogger.warn("getFileBaseInfo failed,outputDirId:{}",outputDirId); + throw new RuntimeException("上一节点信息查询失败"); + } + FileMetadataInfoResp 
fileMetadataInfoResp = fileBaseInfoResp.getData(); + String objectKey = fileMetadataInfoResp.getObjectKey(); + return objectKey; + } + + /** + * 将参数Map转换为SubmitHpcTaskRemoteReq对象的工具方法 + */ + private SubmitHpcTaskRemoteReq convertParamsToReq(Map params) { + SubmitHpcTaskRemoteReq req = new SubmitHpcTaskRemoteReq(); + if (params == null) { + return req; + } + // 基础字段映射 + req.setJobName(params.get("jobName").toString()); + // 处理int类型字段,包含空值和非数字的异常处理 + try { + req.setCoreNum(params.get("coreNum") != null ? Integer.parseInt(params.get("coreNum").toString()) : 0); + } catch (NumberFormatException e) { + CoreLogger.error("coreNum parse error:{},coreNum:{}",e.getMessage(),params.get("coreNum")); + req.setCoreNum(0); + } + req.setSoftware(params.get("software").toString()); + req.setJobType(params.get("jobType").toString()); + try { + req.setIndependence(params.get("independence") != null ? Integer.parseInt(params.get("independence").toString()) : 0); + } catch (NumberFormatException e) { + req.setIndependence(0); + } + req.setTaskId(params.get("taskId").toString()); + req.setTaskName(params.get("taskName").toString()); + req.setRunId(params.get("runId").toString()); + req.setRunName(params.get("runName").toString()); + // mock 时暂时自己传递,后面根据软件名称查询命令 todo 后面从表配置查询 + String command =(params.get("command")==null||StringUtils.isBlank(params.get("command").toString()))? 
+ "\\\\CARSAFE\\share\\solver\\RLithium\\reta.exe -i %s" : params.get("command").toString(); + req.setCommand(command); + req.setProjectname(params.get("projectname").toString()); +// req.setFeatchFileType(params.get("featchFileType").toString()); + // req.setBeforeNodeId(params.get("beforeNodeId").toString()); + // 处理commandExpand字段(JSON字符串转Map) + + // 动态命令 +// String commandExpandJson = params.get("commandExpand").toString(); +// if (StringUtils.isNotBlank(commandExpandJson)) { +// try { +// // 将JSON字符串转换为Map +// Map commandExpand = objectMapper.readValue( +// commandExpandJson, +// new TypeReference>() {} +// ); +//// req.setCommandExpand(commandExpand); +// } catch (Exception e) { +// CoreLogger.error("convertParamsToReq error:{},params:{}",e.getMessage(), JSONObject.toJSONString(params)); +// // 如设为null或空Map +//// req.setCommandExpand(new HashMap<>()); +// } +// } + + return req; + } + + + public String mockinit(){ +// SubmitHpcTaskRemoteReq mockReq = mockSubmitHpcTaskReq(); +// SdmResponse submitResp = taskFeignClient.submitHpcJob(mockReq); +// if(!submitResp.isSuccess()|| StringUtils.isBlank(submitResp.getData())){ +// log.error("HpcHandler submit failed,jobName:{}",mockReq.getJobName()); +// System.out.println("失败"); +// return "失败"; +// } +// String hpcTaskId = submitResp.getData(); + Map params = getParams(); + HPCExecuteConfig hpcExecuteConfig = new HPCExecuteConfig(); + // todo `flowable`.`process_node_param` + hpcExecuteConfig.setBeforeNodeId("uuid-node-8d3e61e7-1374-419c-9e46-210cb88c1113"); + execute(null,params,hpcExecuteConfig); + return "ok"; + } + + private Map getParams() { + Map params = new HashMap<>(); + // 基础字段 + params.put("timesmap", String.valueOf(System.currentTimeMillis())); // 示例时间戳(2025-07-29 00:00:00) + params.put("jobName", "HPC-数据处理作业-"+ System.currentTimeMillis()); + params.put("coreNum", 32); + params.put("software", "reta.exe"); + params.put("jobType", "流体动力学仿真"); + params.put("independence", 1); + params.put("taskId", 
"123456"); + params.put("taskName", "锂电池热管理系统研发"); + params.put("runId", "55555"); + params.put("runName", "HPC-电池"); +// params.put("command", "\\\\CARSAFE\\share\\solver\\RLithium\\reta.exe -i %retaFile"); + params.put("command", "\\\\CARSAFE\\share\\solver\\RLithium\\reta.exe -i .\\model\\aa.xml"); + params.put("projectname", "新能源汽车锂电池安全性能优化项目"); + params.put("featchFileType", "hpcNode"); // 补充示例值 + params.put("beforeNodeId", null); // 示例空值 + // commandExpand去掉outName后的JSON字符串 + String commandExpandJson = "{\n" + + " \"retaFile\": {\n" + + " \"id\": 1,\n" + + " \"keyEnName\": \"retaFile\",\n" + + " \"keyCnName\": \"电池求解文件\",\n" + + " \"valueType\": \"file_regex_match\",\n" + + " \"inputValue\": \"*.jpg\"\n" + + " }\n" + + "}"; + params.put("commandExpand", commandExpandJson); + return params; + } + + private SubmitHpcTaskRemoteReq mockSubmitHpcTaskReq() { + SubmitHpcTaskRemoteReq req = new SubmitHpcTaskRemoteReq(); + // 生成任务名称:年月日-时分秒,如 20251127-145120 + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd-HHmmss"); + String timestamp = sdf.format(new Date()); + req.jobName = "电池_"+timestamp; + req.coreNum = 8; // 默认8核 + req.software = "reta.exe"; + req.jobType = "仿真计算"; + req.independence = 1; // 独立任务 +// req.inputFiles = Arrays.asList("input1.dat", "input2.dat", "input3.dat"); +// req.masterFile = "master.dat"; + req.taskId = "TASKID_" + timestamp; + req.taskName = "测试任务_" + timestamp; + req.runId = "RUNID_" + timestamp; + req.runName = "测试算力_" + timestamp; + req.command = "\\\\CARSAFE\\share\\solver\\RLithium\\reta.exe -i \\\\CARSAFE\\share\\testproject\\testjob\\testtask\\model\\aa.xml"; + req.projectname = "新能源汽车锂电池安全性能优化"; + return req; + } + + +} + + + +package com.sdm.flowable.delegate.handler; + +import com.sdm.common.entity.flowable.executeConfig.BaseExecuteConfig; +import com.sdm.common.entity.flowable.executeConfig.HttpExecuteConfig; +import org.flowable.engine.delegate.DelegateExecution; +import org.springframework.stereotype.Component; + +import 
java.util.Map; + +// HTTP请求执行器(executeType=HTTP) +@Component("http") +public class HttpHandler implements ExecutionHandler,HttpExecuteConfig> { + + @Override + public void execute(DelegateExecution execution, Map params, HttpExecuteConfig config) { + // 实现HTTP请求逻辑... + } +} + + + +package com.sdm.flowable.delegate.handler; + +import com.sdm.common.entity.flowable.executeConfig.LocalAppExecuteConfig; +import org.flowable.engine.delegate.DelegateExecution; +import org.springframework.stereotype.Component; + +import java.util.Map; + +// 本地应用处理器(executeType=localApp) +@Component("localApp") +public class LocalAppHandler implements ExecutionHandler,LocalAppExecuteConfig> { + @Override + public void execute(DelegateExecution execution, Map params, LocalAppExecuteConfig config) { + // 实现本地应用处理逻辑... + } +} + + + +package com.sdm.flowable.delegate; + +import com.alibaba.fastjson2.JSONObject; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.sdm.common.common.SdmResponse; +import com.sdm.common.entity.flowable.executeConfig.BaseExecuteConfig; +import com.sdm.common.entity.req.data.GetFileBaseInfoReq; +import com.sdm.common.entity.req.flowable.AsyncCallbackRequest; +import com.sdm.common.entity.resp.data.FileMetadataInfoResp; +import com.sdm.common.feign.inter.data.IDataFeignClient; +import com.sdm.common.config.FlowableConfig; +import com.sdm.flowable.delegate.handler.ExecutionHandler; +import com.sdm.flowable.service.IAsyncTaskRecordService; +import com.sdm.flowable.service.IProcessNodeParamService; +import com.sdm.flowable.util.FlowNodeIdUtils; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.ObjectUtils; +import org.flowable.engine.RuntimeService; +import org.flowable.engine.delegate.DelegateExecution; +import org.flowable.engine.delegate.JavaDelegate; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Collections; +import java.util.Map; + +/** + * 
通用执行器,用于处理不同类型的节点(如 userTask, serviceTask)的执行逻辑。 + */ +@Component("universalDelegate") +@Slf4j +public class UniversalDelegate implements JavaDelegate { + @Autowired + private ObjectMapper objectMapper; + + @Autowired + private IProcessNodeParamService processNodeParamService; + + @Autowired + private IAsyncTaskRecordService asyncTaskRecordService; + + @Autowired + private Map handlerMap; // 执行器映射 + + @Autowired + private RuntimeService runtimeService; + + @Autowired + private IDataFeignClient dataFeignClient; + + @Override + public void execute(DelegateExecution execution) { + try { + // 1. 获取当前节点信息 + String runId = (String) execution.getVariable("runId"); + String processDefinitionId = execution.getProcessDefinitionId(); + String procInstId = execution.getProcessInstanceId(); + String nodeId = execution.getCurrentActivityId(); + String nodeName = execution.getCurrentFlowElement().getName(); + + // 2. 读取输入参数 + JSONObject params = processNodeParamService.getParam(processDefinitionId,nodeId,runId); + log.info("universalDelegate 开始执行节点, runId:{},processDefinitionId:{},procInstId: {}, nodeId: {}, nodeName: {},当前节点执行参数 params:{}",runId,processDefinitionId, procInstId, nodeId, nodeName,params); + + // 3、创建本地文件夹,用于后续节点计算直接从本地读取,不需要再从minio中获取数据 + Long currentNodeOutputDirId = params.getLong("outputDirId"); + if(ObjectUtils.isEmpty(currentNodeOutputDirId)){ + throw new RuntimeException("当前节点未配置输出文件夹"); + } + GetFileBaseInfoReq getFileBaseInfoReq = new GetFileBaseInfoReq(); + getFileBaseInfoReq.setFileId(currentNodeOutputDirId); + SdmResponse fileBaseInfoResp = dataFeignClient.getFileBaseInfo(getFileBaseInfoReq); + if(!fileBaseInfoResp.isSuccess()||fileBaseInfoResp.getData()==null){ + throw new RuntimeException("当前节点未查询到输入文件夹"); + } + String objectKey = fileBaseInfoResp.getData().getObjectKey(); + FlowNodeIdUtils.prepareLocalDir(objectKey); + + // 检查是否有扩展元素配置 + if (execution.getCurrentFlowElement().getExtensionElements() != null && + 
execution.getCurrentFlowElement().getExtensionElements().get(FlowableConfig.EXECUTECONFIG) != null) { + + String extensionElement = execution + .getCurrentFlowElement() + .getExtensionElements() + .get(FlowableConfig.EXECUTECONFIG).get(0).getElementText(); + + log.info("节点扩展配置, 流程 runId:{},processDefinitionId:{},实例ID: {}, 节点ID: {}, 扩展配置: {}",runId,processDefinitionId, procInstId, nodeId, extensionElement); + + BaseExecuteConfig config = + objectMapper.readValue(extensionElement, BaseExecuteConfig.class); + + String executeType = config.getExecuteType(); + ExecutionHandler handler = handlerMap.get(executeType); + if (handler == null) { + log.error("不支持的执行方式, 流程实例ID: {}, 节点ID: {}, 执行方式: {}", procInstId, nodeId, executeType); + throw new RuntimeException("不支持的执行方式:" + executeType); + } + + log.info("开始执行具体任务处理逻辑, 流程实例ID: {}, 节点ID: {}, 执行方式: {}", procInstId, nodeId, executeType); + // 执行具体的任务处理逻辑 + handler.execute(execution, params, config); + log.info("任务处理逻辑执行完成, 流程实例ID: {}, 节点ID: {}, 执行方式: {}", procInstId, nodeId, executeType); + } else { + // 对于没有配置 executeConfig 的节点(如 userTask),直接完成任务 + log.info("节点 {} 没有执行配置,直接完成任务", nodeName); + } + log.info("节点执行完成, 流程实例ID: {}, 节点ID: {}, 节点名称: {}", procInstId, nodeId, nodeName); + } catch (Exception e) { + // 处理失败情况 - 跳转到重试任务 + log.error("节点执行过程中发生异常", e); + throw new RuntimeException("节点执行过程中发生异常:" + e.getMessage()); + // handleFailure(execution, e); + } + } + + + /** + * 处理任务执行失败的情况 + * @param execution 当前执行上下文 + * @param e 异常信息 + */ + private void handleFailure(DelegateExecution execution, Exception e) { + String procInstId = execution.getProcessInstanceId(); + String failedNodeId = execution.getCurrentActivityId(); + String nodeName = execution.getCurrentFlowElement().getName(); + + log.error("节点执行失败,流程实例ID: {}, 节点ID: {}, 节点名称: {}, 错误信息: {}", procInstId, failedNodeId, nodeName, e.getMessage(), e); + + // 记录失败节点(供后续重试用) + runtimeService.setVariable(procInstId, FlowableConfig.RETRY_ORIGIN_NODE_ID, failedNodeId); + 
runtimeService.setVariable(procInstId, FlowableConfig.RETRY_ERROR_MESSAGE, e.getMessage()); + + log.info("准备跳转到重试任务, 流程实例ID: {}, 失败节点ID: {}, 重试任务ID: {}", procInstId, failedNodeId, FlowNodeIdUtils.getRetryTaskId()); + + // 跳转到通用重试任务 + runtimeService.createChangeActivityStateBuilder() + .processInstanceId(procInstId) + .moveActivityIdsToSingleActivityId( + Collections.singletonList(failedNodeId), + FlowNodeIdUtils.getRetryTaskId() + ) + .changeState(); + + log.info("已完成跳转到重试任务, 流程实例ID: {}, 失败节点ID: {}", procInstId, failedNodeId); + // 不抛出异常,让流程继续 + } + + /** + * 外部系统回调接口,用于唤醒等待的流程实例 + * @param processInstanceId 流程实例ID + * @param nodeId 节点ID + * @param resultData 结果数据 + */ + /** + * HPC 回调接口,用于唤醒等待的流程 + */ + public void signalByTaskId(AsyncCallbackRequest request) { + log.info("接收到异步回调请求: {}", request); + asyncTaskRecordService.completeAsyncTask(request); + log.info("异步回调处理完成: {}", request.getAsyncTaskId()); + } + +} + + + +package com.sdm.flowable.dto; + +import lombok.Data; + +@Data +public class ProcessDefinitionInfo { + private String id; + private String key; + private String name; + private String version; + private String deploymentId; + private String resourceName; + private boolean suspended; +} + + + +package com.sdm.flowable.dto.req; + +import lombok.Data; + +import java.util.HashMap; +import java.util.Map; + +@Data +public class CompleteTaskReq { + /** + * 流程实例ID + */ + private String processInstanceId; + /** + * 流程节点id + */ + private String taskDefinitionKey; + private Map variables = new HashMap<>(); + /** + * 任务类型:FlowElementTypeEnums + * userTask - 普通用户任务 + * serviceTask - ServiceTask前置隐藏等待任务 + */ + private String flowelementType; + + +} + + + +package com.sdm.flowable.dto.req; + +import lombok.Data; +import java.util.Map; + +@Data +public class PreviewNodeInputFilesReq { + /** 流程定义ID (可选,视上下文而定) */ + private String processDefinitionId; + + /** 算例ID */ + private String runId; + + /** 当前节点ID */ + private String nodeId; + + /** 前置节点ID */ + private String 
beforeNodeId; + + /** + * 正则配置 Map + * Key: 类别标识 (如 "master", "input", "script") + * Value: 正则表达式 (如 "^.+\\.xml$", "^.+\\.json$") + */ + private Map regexConfig; +} + + + +package com.sdm.flowable.dto.req; + +import lombok.Data; + +import java.io.Serializable; +import java.util.Map; + +/** + * 重试请求参数 + */ +@Data +public class RetryRequest implements Serializable { + private static final long serialVersionUID = 1L; + /** + * 流程实例ID + */ + String procInstId; + + /** + * 目标节点ID + */ + private String targetNodeId; + + /** + * 重试参数 + */ + private Map variables; +} + + + +package com.sdm.flowable.dto.resp; + +import lombok.Data; + +@Data +public class NodeInputFilePreviewResp { + /** 文件名称 */ + private String fileName; + + /** 文件大小 */ + private String fileSize; + + /** 创建时间 */ + private String createTime; + + /** + * 文件完整本地路径 (核心) + * 后端同步MinIO后,这里统一返回本地绝对路径 + */ + private String filePath; + + /** + * 文件来源 + * CURRENT: 当前节点手动上传 + * PREVIOUS: 上游节点产出 + */ + private String source; + + /** + * 文件类别 key + * 对应请求中 regexConfig 的 Key (如 "master", "input") + * 如果未匹配到任何正则但被保留(如手动上传),可为 "UNCATEGORIZED" + */ + private String fileCategory; +} + + + +package com.sdm.flowable.entity; + +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableName; +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import java.time.LocalDateTime; +import java.io.Serializable; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.experimental.Accessors; + +/** + *

+ * Asynchronous task execution record ("async_task_record").
+ * One row per async job (e.g. an HPC submission) tied to a workflow
+ * execution, so that an external callback can locate and resume the
+ * waiting process instance.
+ *
+ * @author author
+ * @since 2025-11-26
+ */
+@Data
+@EqualsAndHashCode(callSuper = false)
+@Accessors(chain = true)
+@TableName("async_task_record")
+@ApiModel(value="AsyncTaskRecord对象", description="异步任务执行记录表")
+public class AsyncTaskRecord implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    // Auto-increment surrogate primary key.
+    @ApiModelProperty(value = "主键ID,自增")
+    @TableId(value = "id", type = IdType.AUTO)
+    private Long id;
+
+    // Globally unique id used to address a single async task from callbacks.
+    @ApiModelProperty(value = "异步任务唯一标识,全局唯一,用于定位单个异步任务")
+    @TableField(value = "asyncTaskId")
+    private String asyncTaskId;
+
+    // Workflow-engine process instance this task belongs to.
+    @ApiModelProperty(value = "流程实例ID,关联工作流引擎的流程实例(如Camunda的processInstanceId)")
+    @TableField(value = "processInstanceId")
+    private String processInstanceId;
+
+    // Workflow-engine execution id within the process instance.
+    @ApiModelProperty(value = "流程执行ID,关联工作流引擎的执行实例(如Camunda的executionId)")
+    @TableField(value = "executionId")
+    private String executionId;
+
+    // Receive-task node id; the async callback signals this node to continue the flow.
+    @ApiModelProperty(value = "接收任务ID,关联工作流中接收任务节点的ID(用于异步回调触发流程继续)")
+    @TableField(value = "receiveTaskId")
+    private String receiveTaskId;
+
+    // Which business handler processes this task (e.g. HPC / OCR / AI).
+    @ApiModelProperty(value = "业务处理器类型,标识任务对应的业务处理逻辑,例如:HPC(高性能计算)/OCR(图文识别)/AI(智能分析)")
+    @TableField(value = "handlerType")
+    private String handlerType;
+
+    // JSON snapshot of the request that triggered the async task.
+    @ApiModelProperty(value = "任务请求参数,JSON格式字符串,存储触发异步任务时的入参信息")
+    @TableField(value = "requestJson")
+    private String requestJson;
+
+    // JSON result recorded on completion (both success and failure).
+    @ApiModelProperty(value = "任务执行结果,JSON格式字符串,存储异步任务完成后的返回数据(成功/失败均记录)")
+    @TableField(value = "resultJson")
+    private String resultJson;
+
+    // Lifecycle state; values mirror AsyncTaskStatusEnum (INIT/RUNNING/SUCCESS/FAIL).
+    @ApiModelProperty(value = "任务状态:INIT(初始化)/RUNNING(执行中)/SUCCESS(执行成功)/FAIL(执行失败)")
+    @TableField(value = "status")
+    private String status;
+
+    @ApiModelProperty(value = "任务创建时间,默认当前时间")
+    @TableField(value = "createTime")
+    private LocalDateTime createTime;
+
+    @ApiModelProperty(value = "任务更新时间,数据变更时自动更新为当前时间")
+    @TableField(value = "updateTime")
+    private LocalDateTime updateTime;
+
+
+}
+
+
+
+package com.sdm.flowable.entity;
+
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableName;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableId;
+import java.time.LocalDateTime;
+import java.io.Serializable;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.experimental.Accessors;
+
+/**
+ * Process-node input parameter table ("process_node_param").
+ * Stores the JSON input parameters configured for one node of one
+ * process definition, keyed additionally by the bound run (算例) uuid.
+ *
+ * @author author
+ * @since 2025-11-25
+ */
+@Data
+@EqualsAndHashCode(callSuper = false)
+@Accessors(chain = true)
+@TableName("process_node_param")
+@ApiModel(value="ProcessNodeParam对象", description="流程节点输入参数表")
+public class ProcessNodeParam implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    // Auto-increment surrogate primary key.
+    @TableId(value = "id", type = IdType.AUTO)
+    private Long id;
+
+    // Process definition this parameter set belongs to.
+    @ApiModelProperty(value = "流程定义ID")
+    @TableField("processDefinitionId")
+    private String processDefinitionId;
+
+    // Concrete process instance (may be unset until the flow is started).
+    @ApiModelProperty(value = "流程实例ID")
+    @TableField("processInstanceId")
+    private String processInstanceId;
+
+    // BPMN node (activity) id the parameters apply to.
+    @ApiModelProperty(value = "节点ID")
+    @TableField("nodeId")
+    private String nodeId;
+
+    // Uuid of the run (算例) bound to the flow; used as lookup key together
+    // with processDefinitionId + nodeId (see IProcessNodeParamService.getParam).
+    @ApiModelProperty(value = "流程绑定的算例uuid")
+    @TableField("runId")
+    private String runId;
+
+    // Raw JSON blob of the node's input parameters (e.g. contains "outputDirId").
+    @ApiModelProperty(value = "输入参数JSON")
+    @TableField("paramJson")
+    private String paramJson;
+
+    @TableField("createTime")
+    private LocalDateTime createTime;
+
+    @TableField("updateTime")
+    private LocalDateTime updateTime;
+
+
+}
+
+ + +package com.sdm.flowable.enums; + +/** + * 异步任务结果枚举 + */ +public enum AsyncTaskStatusEnum { + INIT("INIT"), + RUNNING("RUNNING"), + SUCCESS("SUCCESS"), + FAIL("FAIL"); + private String code; + + AsyncTaskStatusEnum(String code) { + this.code = code; + } + public String getCode() { + return code; + } +} + + + +package com.sdm.flowable.enums; + +public enum FlowElementTypeEnums { + STARTEVENT("startEvent"), + ENDEVENT("endEvent"), + USERTASK("userTask"), + SERVICETASK("serviceTask"), + EXCLUSIVEGATEWAY("exclusiveGateway"), + PARALLELGATEWAY("parallelGateway"), + SEQUENCEFLOW("sequenceFlow"); + private final String type; + + FlowElementTypeEnums(String startEvent) { + this.type = startEvent; + } + public String getType() { + return type; + } + + public static FlowElementTypeEnums fromString(String type) { + for (FlowElementTypeEnums flowElementType : FlowElementTypeEnums.values()) { + if (flowElementType.type.equals(type)) { + return flowElementType; + } + } + throw new IllegalArgumentException("Unknown type: " + type); + } +} + + + +package com.sdm.flowable.enums; + +/** + * 节点状态枚举 + */ +public enum NodeStateEnum { + /** + * 等待中 + */ + PENDING("pending"), + + /** + * 活动中 + */ + ACTIVE("active"), + + /** + * 挂起 + */ + SUSPENDED("suspended"), + + /** + * 错误 + */ + ERROR("error"), + + /** + * 已完成 + */ + FINISHED("finished"); + + private final String code; + + NodeStateEnum(String code) { + this.code = code; + } + + public String getCode() { + return code; + } +} + + + +package com.sdm.flowable.enums; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +/** + * 操作类型枚举 & 权限矩阵定义 + */ +public enum OperationTypeEnum { + + + /** + * 挂起/暂停:只能对 RUNNING 状态的操作 + */ + SUSPEND(ProcessInstanceStateEnum.RUNNING), + + /** + * 激活/恢复:只能对 SUSPENDED 状态的操作 + */ + ACTIVATE(ProcessInstanceStateEnum.SUSPENDED), + + /** + * 终止/取消:RUNNING, SUSPENDED, FAILED 都可以终止 + */ + TERMINATE(ProcessInstanceStateEnum.RUNNING, ProcessInstanceStateEnum.SUSPENDED, 
ProcessInstanceStateEnum.ERROR), + + /** + * 重试/跳转:必须是 FAILED 状态(有死信) + */ + RETRY(ProcessInstanceStateEnum.ERROR), + + /** + * 启动:通常对应无实例状态,这里作为占位 + */ + START(), + + /** + * 归档/查看报告:必须是 COMPLETED, TERMINATED + */ + ARCHIVE(ProcessInstanceStateEnum.COMPLETED, ProcessInstanceStateEnum.CANCELLED), + + /** + * 正常执行/提交任务:只能对 RUNNING 状态操作 + */ + EXECUTE(ProcessInstanceStateEnum.RUNNING); + + // 该操作允许的前置状态列表 + private final List allowStates; + + OperationTypeEnum(ProcessInstanceStateEnum... states) { + this.allowStates = states != null ? Arrays.asList(states) : Collections.emptyList(); + } + + public List getAllowStates() { + return allowStates; + } +} + + + +package com.sdm.flowable.enums; + +/** + * 流程实例状态枚举 + */ +public enum ProcessInstanceStateEnum { + /** + * 运行中 + */ + RUNNING("running"), + + /** + * 挂起 + */ + SUSPENDED("suspended"), + + /** + * 错误 + */ + ERROR("error"), + + /** + * 已完成 + */ + COMPLETED("completed"), + + /** + * 已取消 + */ + CANCELLED("cancelled"); + + private final String code; + + ProcessInstanceStateEnum(String code) { + this.code = code; + } + + public String getCode() { + return code; + } +} + + + +package com.sdm.flowable.filter; + +import com.sdm.common.common.ThreadLocalContext; +import com.sdm.common.config.WhitelistProperties; +import jakarta.servlet.*; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.AntPathMatcher; + +import java.io.IOException; +import java.util.List; + +@Slf4j +public class AuthFilter implements Filter { + @Autowired + private WhitelistProperties whitelistProperties; + + private List excludedPaths; + private final AntPathMatcher pathMatcher = new AntPathMatcher(); + + @Override + public void init(FilterConfig filterConfig) throws ServletException { + Filter.super.init(filterConfig); + // 从初始化参数中读取白名单 + excludedPaths = 
whitelistProperties.getPaths(); + log.info("----------- AuthFilter init ----------"); + } + + @Override + public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException { + if (servletRequest instanceof HttpServletRequest) { + HttpServletRequest req = (HttpServletRequest) servletRequest; + String path = req.getRequestURI().substring(req.getContextPath().length()); + // 检查当前请求是否在白名单中 + for (String excludedPath : excludedPaths) { + if (pathMatcher.match(excludedPath, path)) { + filterChain.doFilter(servletRequest, servletResponse); + return; + } + } + if(!ThreadLocalContext.verifyRequest(req)) + { + HttpServletResponse response = (HttpServletResponse) servletResponse; + response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); // 401 + response.setContentType("application/json;charset=UTF-8"); + response.getWriter().write("{\"code\":401,\"message\":\"未登录或认证信息缺失\"}"); + return; + } + } + filterChain.doFilter(servletRequest, servletResponse); + } + + @Override + public void destroy() { + Filter.super.destroy(); + log.info("----------- AuthFilter destroy ----------"); + } +} + + + +package com.sdm.flowable; + +import org.mybatis.spring.annotation.MapperScan; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.client.discovery.EnableDiscoveryClient; +import org.springframework.cloud.openfeign.EnableFeignClients; +import org.springframework.scheduling.annotation.EnableScheduling; + +@SpringBootApplication(scanBasePackages = {"com.sdm.flowable","com.sdm.common"}) +@EnableDiscoveryClient +@EnableScheduling +@EnableFeignClients(basePackages = "com.sdm.common.feign") +@MapperScan("com.sdm.flowable.dao") +public class FlowableApplication { + + public static void main(String[] args) { + SpringApplication.run(FlowableApplication.class, args); + } + +} + + + +package com.sdm.flowable.listener; + 
+import com.sdm.common.config.FlowableConfig; +import com.sdm.flowable.util.FlowNodeIdUtils; +import lombok.extern.slf4j.Slf4j; +import org.flowable.bpmn.model.BpmnModel; +import org.flowable.bpmn.model.FlowElement; +import org.flowable.bpmn.model.UserTask; +import org.flowable.engine.RepositoryService; +import org.flowable.engine.RuntimeService; +import org.flowable.engine.TaskService; +import org.flowable.engine.delegate.DelegateExecution; +import org.flowable.engine.delegate.ExecutionListener; +import org.flowable.task.api.Task; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Collections; + +/** + * 重试跳转监听器,在 retryInputTask 完成时执行跳转逻辑 + */ +@Slf4j +@Component("retryRedirectListener") +public class RetryRedirectListener implements ExecutionListener { + + @Autowired + private RuntimeService runtimeService; + + @Autowired + private RepositoryService repositoryService; + + @Autowired + private TaskService taskService; + + @Override + public void notify(DelegateExecution execution) { + String runId = (String) execution.getVariable("runId"); + String processDefinitionId = execution.getProcessDefinitionId(); + String procInstId = execution.getProcessInstanceId(); + String currentActivityId = execution.getCurrentActivityId(); + + log.info("重试跳转监听器开始执行,流程runId:{} 流程实例ID: {},流程定义id:{}, 当前节点ID: {}",runId, procInstId,processDefinitionId, currentActivityId); + + // 1. 获取目标节点ID(由重试接口设置) + String targetNodeId = (String) runtimeService.getVariable(procInstId, FlowableConfig.RETRY_TARGET_NODE_ID); + log.info("获取重试目标节点ID, 流程实例ID: {}, 目标节点ID: {}", procInstId, targetNodeId); + + if (targetNodeId == null) { + log.error("未指定重试目标节点, 流程实例ID: {}", procInstId); + throw new IllegalStateException("未指定重试目标节点"); + } + + // 2. 
判断目标节点类型 + log.info("获取BPMN模型, 流程定义ID: {}", execution.getProcessDefinitionId()); + BpmnModel bpmnModel = repositoryService.getBpmnModel(execution.getProcessDefinitionId()); + FlowElement targetElement = bpmnModel.getMainProcess().getFlowElement(targetNodeId); + + log.info("获取目标节点元素, 流程实例ID: {}, 目标节点ID: {}, 目标节点对象为空: {}", procInstId, targetNodeId, targetElement == null); + + if (targetElement == null) { + log.error("目标节点不存在, 流程实例ID: {}, 目标节点ID: {}", procInstId, targetNodeId); + throw new IllegalArgumentException("目标节点不存在: " + targetNodeId); + } + + log.info("目标节点类型: {}", targetElement.getClass().getSimpleName()); + + // 3. 跳转到目标节点 + log.info("开始跳转到目标节点, 流程实例ID: {}, 源节点ID: {}, 目标节点ID: {}", + procInstId, FlowNodeIdUtils.getRetryTaskId(), targetNodeId); + + runtimeService.createChangeActivityStateBuilder() + .processInstanceId(procInstId) + .moveActivityIdsToSingleActivityId( + Collections.singletonList(FlowNodeIdUtils.getRetryTaskId()), + targetNodeId + ) + .changeState(); + + log.info("节点跳转操作完成, 流程实例ID: {}, 目标节点ID: {}", procInstId, targetNodeId); + + // 4. 
如果目标是 UserTask,自动完成它(模拟"无人值守") + if (targetElement instanceof UserTask) { + log.info("目标节点是UserTask,准备自动完成, 流程实例ID: {}, 目标节点ID: {}", procInstId, targetNodeId); + + // 等待任务创建(异步操作可能有延迟) + awaitUserTaskCreated(procInstId, targetNodeId); + + // 查询刚创建的 UserTask + log.info("查询刚创建的UserTask, 流程实例ID: {}, 目标节点ID: {}", procInstId, targetNodeId); + Task userTask = taskService.createTaskQuery() + .processInstanceId(procInstId) + .taskDefinitionKey(targetNodeId) + .orderByTaskCreateTime().desc() // 取最新 + .singleResult(); + + log.info("UserTask查询结果, 流程实例ID: {}, 目标节点ID: {}, 任务对象为空: {}", procInstId, targetNodeId, userTask == null); + + if (userTask != null) { + // 自动完成 UserTask(不传变量,或传空) + log.info("开始自动完成UserTask, 任务ID: {}, 流程实例ID: {}, 目标节点ID: {}", userTask.getId(), procInstId, targetNodeId); + taskService.complete(userTask.getId()); + log.info("自动完成 UserTask: {}", targetNodeId); + } + } else { + log.info("目标节点不是UserTask,无需自动完成, 节点类型: {}", targetElement.getClass().getSimpleName()); + } + + // 如果是 ServiceTask,引擎会自动执行,无需干预 + log.info("已完成跳转到目标节点: {}", targetNodeId); + } + + // 辅助方法:等待 UserTask 创建(简单轮询) + private void awaitUserTaskCreated(String procInstId, String taskDefKey) { + log.info("等待UserTask创建, 流程实例ID: {}, 任务定义Key: {}", procInstId, taskDefKey); + + int attempts = 0; + while (attempts < 10) { + long count = taskService.createTaskQuery() + .processInstanceId(procInstId) + .taskDefinitionKey(taskDefKey) + .count(); + + log.debug("检查UserTask是否存在, 流程实例ID: {}, 任务定义Key: {}, 当前尝试次数: {}, 任务数量: {}", + procInstId, taskDefKey, attempts, count); + + if (count > 0) { + log.info("UserTask已创建, 流程实例ID: {}, 任务定义Key: {}, 尝试次数: {}", procInstId, taskDefKey, attempts); + return; + } + + try { + Thread.sleep(200); // 200ms + attempts++; + } catch (InterruptedException e) { + log.warn("等待UserTask创建时线程被中断, 流程实例ID: {}, 任务定义Key: {}", procInstId, taskDefKey); + Thread.currentThread().interrupt(); + break; + } + } + + log.warn("等待UserTask创建超时, 流程实例ID: {}, 任务定义Key: {}, 最大尝试次数: {}", procInstId, 
taskDefKey, attempts); + } +} + + + +package com.sdm.flowable.listener; + +import com.alibaba.fastjson2.JSONObject; +import com.sdm.common.common.SdmResponse; +import com.sdm.common.entity.req.data.GetFileBaseInfoReq; +import com.sdm.common.entity.resp.data.FileMetadataInfoResp; +import com.sdm.common.feign.inter.data.IDataFeignClient; +import com.sdm.flowable.service.IProcessNodeParamService; +import com.sdm.flowable.util.FlowNodeIdUtils; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.ObjectUtils; +import org.flowable.engine.delegate.DelegateExecution; +import org.flowable.engine.delegate.ExecutionListener; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * UserTask 启动时准备本地输出目录的监听器 + */ +@Slf4j +@Component("userTaskDirectoryPreparationListener") +public class UserTaskDirectoryPreparationListener implements ExecutionListener { + + @Autowired + private IProcessNodeParamService processNodeParamService; + + @Autowired + private IDataFeignClient dataFeignClient; + + @Override + public void notify(DelegateExecution execution) { + String runId = (String) execution.getVariable("runId"); + String nodeId = execution.getCurrentActivityId(); + String processDefinitionId = execution.getProcessDefinitionId(); + + //创建本地文件夹,用于后续节点计算直接从本地读取,不需要再从minio中获取数据 + JSONObject params =processNodeParamService.getParam(processDefinitionId,nodeId,runId); + log.info("userTaskDirectoryPreparationListener, 启动流程 runId:{},nodeId:{},实例id: {},参数 params:{}", runId,nodeId,execution.getProcessInstanceId(),params); + Long currentNodeOutputDirId = params.getLong("outputDirId"); + if(ObjectUtils.isEmpty(currentNodeOutputDirId)){ + throw new RuntimeException("当前节点未配置输出文件夹"); + } + GetFileBaseInfoReq getFileBaseInfoReq = new GetFileBaseInfoReq(); + getFileBaseInfoReq.setFileId(currentNodeOutputDirId); + SdmResponse fileBaseInfoResp = dataFeignClient.getFileBaseInfo(getFileBaseInfoReq); + 
if(!fileBaseInfoResp.isSuccess()||fileBaseInfoResp.getData()==null){ + throw new RuntimeException("当前节点未查询到输入文件夹"); + } + String objectKey = fileBaseInfoResp.getData().getObjectKey(); + FlowNodeIdUtils.prepareLocalDir(objectKey); + } +} + + + +package com.sdm.flowable.process; + +import com.sdm.common.common.SdmResponse; +import com.sdm.common.entity.flowable.dto.ProcessDefinitionDTO; +import com.sdm.common.entity.req.flowable.AsyncCallbackRequest; +import com.sdm.common.entity.resp.flowable.DeployFlowableResp; +import com.sdm.common.entity.resp.flowable.ProcessInstanceDetailResponse; +import com.sdm.flowable.dto.req.CompleteTaskReq; +import com.sdm.flowable.dto.req.PreviewNodeInputFilesReq; +import com.sdm.flowable.dto.resp.NodeInputFilePreviewResp; +import org.flowable.engine.runtime.ProcessInstance; +import org.flowable.validation.ValidationError; + +import java.util.*; + +public interface Iprocess { + // 验证并返回模型验证错误信息 + List validateModel(ProcessDefinitionDTO processDTO) throws Exception; + + // 部署流程(前端传入Flowable标准JSON) + SdmResponse deploy(ProcessDefinitionDTO processDTO) throws Exception; + + void deleteAllDeployments(); + + ProcessInstance startByProcessDefinitionId(String processDefinitionId, Map variables); + + SdmResponse suspendProcessInstance(String processInstanceId); + + SdmResponse activateProcessInstance(String processInstanceId); + + SdmResponse cancelProcessInstance(String processInstanceId); + + /** + * 查询流程实例及所有节点的详细状态(返回结构化 DTO) + * 如果只传了processDefinitionId,根据流程定义返回流程基本信息和节点信息, + * 如果还传了processInstanceId,再封装流程状态和节点状态 + */ + SdmResponse getProcessAndNodeDetailByInstanceId(String processDefinitionId, String processInstanceId, String runId); + + SdmResponse> previewNodeInputFiles(PreviewNodeInputFilesReq req); + + + SdmResponse continueServiceTask( CompleteTaskReq req); + + SdmResponse asyncCallback(AsyncCallbackRequest request); + + SdmResponse retryFailedNode( String processInstanceId, String failNodeId); + + SdmResponse retryToNode(String 
procInstId, String targetNodeId, Map newVariables); +} + + + +package com.sdm.flowable.process; + +import com.alibaba.fastjson2.JSONObject; +import com.sdm.common.common.SdmResponse; +import com.sdm.common.entity.flowable.dto.NodeDetailInfo; +import com.sdm.common.entity.flowable.dto.ProcessDefinitionDTO; +import com.sdm.common.entity.flowable.dto.ProcessInstanceInfo; +import com.sdm.common.entity.req.data.GetFileBaseInfoReq; +import com.sdm.common.entity.req.flowable.AsyncCallbackRequest; +import com.sdm.common.entity.resp.data.FileMetadataInfoResp; +import com.sdm.common.entity.resp.flowable.DeployFlowableResp; +import com.sdm.common.entity.resp.flowable.ProcessInstanceDetailResponse; +import com.sdm.common.feign.inter.data.IDataFeignClient; +import com.sdm.common.config.FlowableConfig; +import com.sdm.flowable.delegate.UniversalDelegate; +import com.sdm.flowable.dto.req.CompleteTaskReq; +import com.sdm.flowable.dto.req.PreviewNodeInputFilesReq; +import com.sdm.flowable.dto.resp.NodeInputFilePreviewResp; +import com.sdm.flowable.enums.FlowElementTypeEnums; +import com.sdm.flowable.enums.NodeStateEnum; +import com.sdm.flowable.enums.ProcessInstanceStateEnum; +import com.sdm.flowable.service.IAsyncTaskRecordService; +import com.sdm.flowable.service.IProcessNodeParamService; +import com.sdm.flowable.util.Dto2BpmnConverter; +import com.sdm.flowable.util.FlowNodeIdUtils; +import lombok.Data; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.ObjectUtils; +import org.flowable.bpmn.model.*; +import org.flowable.bpmn.model.Process; +import org.flowable.engine.*; +import org.flowable.engine.history.HistoricActivityInstance; +import org.flowable.engine.history.HistoricProcessInstance; +import org.flowable.engine.repository.Deployment; +import org.flowable.engine.repository.ProcessDefinition; +import org.flowable.engine.runtime.Execution; +import org.flowable.engine.runtime.ProcessInstance; +import org.flowable.job.api.Job; +import 
org.flowable.task.api.Task; +import org.flowable.validation.ProcessValidator; +import org.flowable.validation.ProcessValidatorFactory; +import org.flowable.validation.ValidationError; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.util.StringUtils; +import org.springframework.web.bind.annotation.RequestBody; + +import java.io.File; +import java.util.*; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + + +@Slf4j +@Service +public class ProcessService implements Iprocess{ + @Autowired + private RepositoryService repositoryService; + + @Autowired + private RuntimeService runtimeService; + + @Autowired + private HistoryService historyService; + + @Autowired + private TaskService taskService; + + @Autowired + private ManagementService managementService; + + @Autowired + private Dto2BpmnConverter dto2BpmnConverter; + + @Autowired + private UniversalDelegate universalDelegate; + + @Autowired + private IProcessNodeParamService processNodeParamService; + + @Autowired + private IAsyncTaskRecordService asyncTaskRecordService; + + @Autowired + private IDataFeignClient dataFeignClient; + + // 部署流程(前端传入Flowable标准JSON) + public SdmResponse deploy(ProcessDefinitionDTO processDTO) throws Exception { + log.info("开始部署流程定义: {}",processDTO); + BpmnModel bpmnModel = dto2BpmnConverter.convert(processDTO); + log.info("BPMN模型转换完成"); + + // 检查BPMN模型是否有效 + if (bpmnModel.getProcesses().isEmpty()) { + log.error("无效的BPMN模型:未找到任何流程定义"); + return SdmResponse.failed("无效的BPMN模型:未找到任何流程定义"); + } + + // 验证BPMN模型 + ProcessValidator validator = new ProcessValidatorFactory().createDefaultProcessValidator(); + List validationErrors = validator.validate(bpmnModel); + if (!validationErrors.isEmpty()) { + StringBuilder errorMsg = new StringBuilder("BPMN模型验证失败:"); + for (ValidationError error : validationErrors) { + errorMsg.append("\n - ").append(error.toString()); + } + log.error("BPMN模型验证失败: 
{}", errorMsg.toString()); + return SdmResponse.failed("BPMN模型验证失败"); + } + + Deployment deployment = repositoryService.createDeployment() + .addBpmnModel("industrial_process.bpmn", bpmnModel) + .name("工业并行部署流程") + .deploy(); + + log.info("流程部署成功, 部署ID: {}, 部署名称: {}", deployment.getId(), deployment.getName()); + // 查询该部署下的流程定义(假设只部署了一个流程) + ProcessDefinition processDefinition = repositoryService.createProcessDefinitionQuery() + .deploymentId(deployment.getId()) + .singleResult(); + + if (processDefinition == null) { + return SdmResponse.failed("部署成功,但未找到关联的流程定义"); + } + DeployFlowableResp resp = new DeployFlowableResp(); + resp.setProcessDefinitionId(processDefinition.getId()); + resp.setProcessDefinitionKey(processDefinition.getKey()); + return SdmResponse.success(resp); + } + + + public void deleteAllDeployments() { + repositoryService.createDeploymentQuery() + .list() + .forEach(deployment + -> repositoryService.deleteDeployment(deployment.getId(), true)); + } + + public ProcessInstance startByProcessDefinitionId(String processDefinitionId, Map variables) { + if (variables == null) { + variables = Collections.emptyMap(); + } + + log.info("根据流程定义ID启动流程实例, 流程定义ID: {}, 变量数量: {}", processDefinitionId, variables.size()); + + ProcessInstance instance = runtimeService.startProcessInstanceById(processDefinitionId, variables); + + log.info("流程实例启动成功, 实例ID: {}, 流程定义ID: {}, 业务Key: {}", + instance.getId(), instance.getProcessDefinitionId(), instance.getBusinessKey()); + + return instance; + } + + + /** + * 挂起流程实例 + */ + public SdmResponse suspendProcessInstance(String processInstanceId) { + log.info("请求挂起流程实例: {}", processInstanceId); + try { + // 检查流程是否存在且处于运行状态 + ProcessInstance processInstance = runtimeService.createProcessInstanceQuery() + .processInstanceId(processInstanceId) + .singleResult(); + + if (processInstance == null) { + return SdmResponse.failed("流程实例不存在或已结束,无法挂起"); + } + + if (processInstance.isSuspended()) { + return SdmResponse.failed("流程实例已经是挂起状态"); + } 
+ + runtimeService.suspendProcessInstanceById(processInstanceId); + log.info("流程实例挂起成功: {}", processInstanceId); + return SdmResponse.success("流程实例挂起成功"); + } catch (Exception e) { + log.error("挂起流程实例失败: {}", e.getMessage(), e); + return SdmResponse.failed("挂起流程实例失败"); + } + } + + /** + * 激活流程实例 + */ + public SdmResponse activateProcessInstance(String processInstanceId) { + log.info("请求激活流程实例: {}", processInstanceId); + try { + ProcessInstance processInstance = runtimeService.createProcessInstanceQuery() + .processInstanceId(processInstanceId) + .singleResult(); + + if (processInstance == null) { + return SdmResponse.failed("流程实例不存在或已结束,无法激活"); + } + + if (!processInstance.isSuspended()) { + return SdmResponse.failed("流程实例已经是激活状态"); + } + + runtimeService.activateProcessInstanceById(processInstanceId); + log.info("流程实例激活成功: {}", processInstanceId); + + // 激活后,立即检查并触发积压的异步回调 + try { + asyncTaskRecordService.triggerPendingTasks(processInstanceId); + } catch (Exception ex) { + log.error("流程激活后的补偿流转发生异常", ex); + } + + return SdmResponse.success(); + } catch (Exception e) { + log.error("激活流程实例失败: {}", e.getMessage(), e); + return SdmResponse.failed("激活流程实例失败 "); + } + } + + /** + * 取消/终止流程实例 + * @param processInstanceId 流程实例ID + */ + public SdmResponse cancelProcessInstance(String processInstanceId) { + log.info("请求取消/终止流程实例: {}", processInstanceId); + try { + // 1. 检查流程是否正在运行 + ProcessInstance processInstance = runtimeService.createProcessInstanceQuery() + .processInstanceId(processInstanceId) + .singleResult(); + + if (processInstance == null) { + // 如果运行时查不到,可能已经结束了,或者不存在 + // 进一步去历史查一下,明确告知用户 + HistoricProcessInstance history = historyService.createHistoricProcessInstanceQuery() + .processInstanceId(processInstanceId) + .singleResult(); + if (history != null) { + return SdmResponse.failed("该流程实例已经结束,无法取消"); + } else { + return SdmResponse.failed("流程实例不存在"); + } + } + + // 2. 
执行删除(在Flowable中,终止运行中的流程就是删除运行时数据) + // 参数1: ID, 参数2: 删除原因 (会被写入历史表的 DELETE_REASON_ 字段) + runtimeService.deleteProcessInstance(processInstanceId, ""); + + log.info("流程实例已取消: {}", processInstanceId); + return SdmResponse.success("流程实例已取消"); + } catch (Exception e) { + log.error("取消流程实例失败: {}", e.getMessage(), e); + return SdmResponse.failed("取消流程实例失败" ); + } + } + + // 验证并返回模型验证错误信息 + public List validateModel(ProcessDefinitionDTO processDTO) throws Exception { + BpmnModel bpmnModel = dto2BpmnConverter.convert(processDTO); + ProcessValidator validator = new ProcessValidatorFactory().createDefaultProcessValidator(); + return validator.validate(bpmnModel); + } + + /** + * 查询流程实例及所有节点的详细状态(返回结构化 DTO) + * 如果只传了processDefinitionId,根据流程定义返回流程基本信息和节点信息, + * 如果还传了processInstanceId,再封装流程状态和节点状态 + */ + public SdmResponse getProcessAndNodeDetailByInstanceId(String processDefinitionId, String processInstanceId, String runId) { + ProcessInstanceDetailResponse response = new ProcessInstanceDetailResponse(); + + // 1. 构建所有节点列表 (不过滤,包含 _check, _wait 等) + List allNodes = buildAllNodeDetails(processDefinitionId, runId); + + // 基础流程信息 + ProcessInstanceInfo processInfo = new ProcessInstanceInfo(); + processInfo.setProcessDefinitionId(processDefinitionId); + + // 2. 如果没有实例ID,返回静态结构 + if (!StringUtils.hasText(processInstanceId)) { + response.setProcessInfo(processInfo); + response.setNodes(allNodes); + return SdmResponse.success(response); + } + + // 3. 准备状态上下文 (一次DB查询,全量数据) + ProcessStateContext context = prepareStateContext(processInstanceId); + + // 4. 计算流程实例整体状态 + // 只要 hasDeadLetterJobs 为 false,流程状态就会显示为 running (或 suspended) + processInfo = buildProcessInstanceInfo(processInstanceId, context.isRunning(), context.isSuspended(), context.isHasDeadLetterJobs()); + + // 5. 
计算每个节点的状态 (核心逻辑) + calculateNodeStates(allNodes, context); + + response.setProcessInfo(processInfo); + response.setNodes(allNodes); + return SdmResponse.success(response); + } + + + // 内部类:状态上下文 (Data Holder) + @Data + private static class ProcessStateContext { + boolean isRunning; + boolean isSuspended; + boolean hasDeadLetterJobs; + // Map + Map errorMap = new HashMap<>(); + // List + Set activeActivityIds = new HashSet<>(); + // Map + Map historyMap = new HashMap<>(); + } + /** + * 步骤1:构建基础节点结构 + */ + private List buildAllNodeDetails(String processDefinitionId, String runId) { + List flowNodes = getOrderedFlowNodes(processDefinitionId); + + return flowNodes.stream() + .map(this::buildNodeDetailInfoFromFlowNode) // 保持你原有的 DTO 转换逻辑 + .peek(detail -> { + JSONObject params = processNodeParamService.getParam(processDefinitionId, detail.getId(), runId); + detail.setUserParam(params); + }).toList(); + } + + /** + * 步骤3:准备状态数据 (一次性查完) + */ + private ProcessStateContext prepareStateContext(String processInstanceId) { + ProcessStateContext ctx = new ProcessStateContext(); + + // 1. 获取运行时流程实例对象(判断是否运行中、是否挂起) + ProcessInstance runtimeInstance = runtimeService.createProcessInstanceQuery() + .processInstanceId(processInstanceId) + .singleResult(); + ctx.setRunning(runtimeInstance != null); + ctx.setSuspended(ctx.isRunning() && runtimeInstance.isSuspended()); + + // 2. 准备历史数据 + List historicActivities = historyService.createHistoricActivityInstanceQuery() + .processInstanceId(processInstanceId) + .list(); + for (HistoricActivityInstance hist : historicActivities) { + if (!ctx.historyMap.containsKey(hist.getActivityId()) || + hist.getStartTime().after(ctx.historyMap.get(hist.getActivityId()).getStartTime())) { + ctx.historyMap.put(hist.getActivityId(), hist); + } + } + + // 3. 准备运行时 Active ID 列表 + if (ctx.isRunning()) { + ctx.activeActivityIds.addAll(runtimeService.getActiveActivityIds(processInstanceId)); + + // A. 
查死信 (Error 来源) + List deadJobs = managementService.createDeadLetterJobQuery() + .processInstanceId(processInstanceId).list(); + + // B. 查普通作业 (补全 Active 来源,防止重试瞬间查不到状态) + List activeJobs = managementService.createJobQuery() + .processInstanceId(processInstanceId).list(); + + // C. 只有当有 作业(死信或普通) 时,才去查 Execution 映射 + if (!deadJobs.isEmpty() || !activeJobs.isEmpty()) { + List executions = runtimeService.createExecutionQuery() + .processInstanceId(processInstanceId).list(); + Map executionToActivityMap = executions.stream() + .filter(e -> e.getActivityId() != null) + .collect(Collectors.toMap(Execution::getId, Execution::getActivityId, (v1, v2) -> v1)); + + // 处理死信 -> 放入 errorMap + if (!deadJobs.isEmpty()) { + ctx.setHasDeadLetterJobs(true); + for (Job job : deadJobs) { + if (job.getExceptionMessage() != null) { + String actId = executionToActivityMap.get(job.getExecutionId()); + if (actId != null) ctx.errorMap.put(actId, job.getExceptionMessage()); + } + } + } + + // 【重点改动】处理普通作业 -> 强制加入 activeActivityIds + // 只要 Job 表里有,它就是 Active 的,不管 Execution 查出来是不是 + for (Job job : activeJobs) { + String actId = executionToActivityMap.get(job.getExecutionId()); + if (actId != null && ! 
ctx.activeActivityIds.contains(actId)) { + ctx.activeActivityIds.add(actId); + } + } + } + } + return ctx; + } + + // 构建流程实例信息 + private ProcessInstanceInfo buildProcessInstanceInfo(String processInstanceId, boolean isRunning, boolean isSuspended, boolean hasError) { + HistoricProcessInstance historicInstance = historyService.createHistoricProcessInstanceQuery() + .processInstanceId(processInstanceId) + .singleResult(); + + if (historicInstance == null) { + throw new RuntimeException("流程实例不存在: " + processInstanceId); + } + + ProcessInstanceInfo info = new ProcessInstanceInfo(); + info.setProcessInstanceId(historicInstance.getId()); + info.setProcessDefinitionId(historicInstance.getProcessDefinitionId()); + info.setStartTime(historicInstance.getStartTime()); + info.setEndTime(historicInstance.getEndTime()); + + // 计算耗时 + Long duration = historicInstance.getDurationInMillis(); + if (duration == null && historicInstance.getStartTime() != null && isRunning) { + duration = System.currentTimeMillis() - historicInstance.getStartTime().getTime(); + } + info.setDurationInMillis(duration); + info.setDurationFormatted(duration != null ? formatDuration(duration) : null); + + // 【优化点】:设置流程状态 + // 优先级:Error > Suspended > Running > Completed + String status; + if (isRunning) { + // --- 运行中 --- + if (hasError) { + status = ProcessInstanceStateEnum.ERROR.getCode(); // 有死信作业,视为异常 + } else if (isSuspended) { + status = ProcessInstanceStateEnum.SUSPENDED.getCode(); // 被挂起 + } else { + status = ProcessInstanceStateEnum.RUNNING.getCode(); // 正常运行 + } + } else { + // --- 已结束 (运行时查不到,历史表里有) --- + String deleteReason = historicInstance.getDeleteReason(); + + if (deleteReason == null) { + // 1. 正常走完结束节点,deleteReason 为空 + status = ProcessInstanceStateEnum.COMPLETED.getCode(); + } else { + // 2. 
有删除原因,说明是被取消或强制终止的 + // 你可以根据 reason 的内容做更细的区分,或者统称为 cancelled + status = ProcessInstanceStateEnum.CANCELLED.getCode(); + } + } + info.setStatus(status); + + return info; + } + + + /** + * 计算节点状态 + */ + private void calculateNodeStates(List nodes, ProcessStateContext ctx) { + for (NodeDetailInfo node : nodes) { + String nodeId = node.getId(); + + // 判断是否是原始节点 (Original Node) + boolean isOriginal = FlowNodeIdUtils.isOriginalNode(nodeId); + + if (isOriginal) { + // === 原始节点:聚合逻辑 === + // 它需要根据 "waitUser -> self -> wait -> check" 整个链条来决定状态 + determineUnifiedState(node, ctx); + } else { + // === 辅助节点:物理逻辑 === + // 它只需要展示自己的真实状态 + determinePhysicalState(node, ctx); + } + } + } + + /** + * 逻辑 A:原始节点状态计算 (聚合所有关联节点) + */ + private void determineUnifiedState(NodeDetailInfo node, ProcessStateContext ctx) { + String origId = node.getId(); + String waitUserId = FlowNodeIdUtils.generateWaitUserTaskId(origId); + String waitId = FlowNodeIdUtils.generateAsyncTaskId(origId); + String checkId = FlowNodeIdUtils.generateCheckTaskId(origId); + + // 1. 判断 ERROR (优先级最高) + // 链条任何一环报错,原始节点都算错 + String errorMsg = ctx.errorMap.get(checkId); // 最常见:check挂了 + if (errorMsg == null) errorMsg = ctx.errorMap.get(origId); // 自己挂了 + // 也可以加上 waitId 的判断,虽然 ReceiveTask 很难挂 + + if (errorMsg != null) { + node.setStatus(NodeStateEnum.ERROR.getCode()); + node.setErrorMessage(errorMsg); + calculateAggregatedTime(node, ctx, waitUserId, origId, checkId); + return; + } + + // 2. 判断 ACTIVE / SUSPENDED + // 链条任何一环在运行,原始节点都算运行 + boolean isActive = ctx.activeActivityIds.contains(origId) || + ctx.activeActivityIds.contains(waitUserId) || + ctx.activeActivityIds.contains(waitId) || + ctx.activeActivityIds.contains(checkId); + + if (isActive) { + // 只要是 Active,就根据流程整体状态来定颜色 + node.setStatus(ctx.isSuspended() ? NodeStateEnum.SUSPENDED.getCode() : NodeStateEnum.ACTIVE.getCode()); + calculateAggregatedTime(node, ctx, waitUserId, origId, checkId); + return; + } + + // 3. 
判断 FINISHED + // 必须是链条的"最后一个环节"结束了,才算整个节点结束 + // 顺序:WaitUser -> Original -> Wait -> Check + // 我们检查 Check 是否有历史;如果没有 Check (非HPC节点),检查 Original + HistoricActivityInstance lastHist = ctx.historyMap.get(checkId); + if (lastHist == null) lastHist = ctx.historyMap.get(waitId); // 兼容 + if (lastHist == null) lastHist = ctx.historyMap.get(origId); + + if (lastHist != null && lastHist.getEndTime() != null) { + node.setStatus(NodeStateEnum.FINISHED.getCode()); + calculateAggregatedTime(node, ctx, waitUserId, origId, checkId); + return; + } + + // 4. 默认 PENDING + node.setStatus(NodeStateEnum.PENDING.getCode()); + } + + /** + * 逻辑 B:辅助节点状态计算 (物理状态) + */ + private void determinePhysicalState(NodeDetailInfo node, ProcessStateContext ctx) { + String nodeId = node.getId(); + + // 1. Error + if (ctx.errorMap.containsKey(nodeId)) { + node.setStatus(NodeStateEnum.ERROR.getCode()); + node.setErrorMessage(ctx.errorMap.get(nodeId)); + setTimeFromHistory(node, ctx.historyMap.get(nodeId)); + return; + } + + // 2. Active + if (ctx.activeActivityIds.contains(nodeId)) { + node.setStatus(ctx.isSuspended() ? NodeStateEnum.SUSPENDED.getCode() : NodeStateEnum.ACTIVE.getCode()); + setTimeFromHistory(node, ctx.historyMap.get(nodeId)); + return; + } + + // 3. Finished + HistoricActivityInstance hist = ctx.historyMap.get(nodeId); + if (hist != null && hist.getEndTime() != null) { + node.setStatus(NodeStateEnum.FINISHED.getCode()); + setTimeFromHistory(node, hist); + return; + } + + // 4. Pending + node.setStatus(NodeStateEnum.PENDING.getCode()); + } + + + /** + * 工具:计算聚合时间 (Original Start -> Check End) + */ + private void calculateAggregatedTime(NodeDetailInfo node, ProcessStateContext ctx, + String waitUserId, String origId, String checkId) { + // Start Time: 链条最早的开始时间 + HistoricActivityInstance startNode = ctx.historyMap.get(waitUserId); + if (startNode == null) startNode = ctx.historyMap.get(origId); + + Date startTime = (startNode != null) ? 
startNode.getStartTime() : null; + node.setStartTime(startTime); + + // End Time: 只有状态是 FINISHED 才有结束时间,取 Check 的结束时间 + Date endTime = null; + if (NodeStateEnum.FINISHED.getCode().equals(node.getStatus())) { + HistoricActivityInstance endNode = ctx.historyMap.get(checkId); + if (endNode == null) endNode = ctx.historyMap.get(origId); + if (endNode != null) endTime = endNode.getEndTime(); + } + node.setEndTime(endTime); + + // Duration + calculateDuration(node, startTime, endTime); + } + + /** + * 工具:设置单个节点的物理时间 + */ + private void setTimeFromHistory(NodeDetailInfo node, HistoricActivityInstance hist) { + if (hist != null) { + node.setStartTime(hist.getStartTime()); + node.setEndTime(hist.getEndTime()); + calculateDuration(node, hist.getStartTime(), hist.getEndTime()); + } + } + + private void calculateDuration(NodeDetailInfo node, Date start, Date end) { + if (start != null) { + long endMillis = (end != null) ? end.getTime() : System.currentTimeMillis(); + long duration = endMillis - start.getTime(); + node.setDurationInMillis(duration); + node.setDurationFormatted(formatDuration(duration)); + } + } + + + /** + * 根据流程定义 ID,按流程执行顺序(BFS)返回所有可达的 FlowNode + */ + private List getOrderedFlowNodes(String processDefinitionId) { + BpmnModel bpmnModel = repositoryService.getBpmnModel(processDefinitionId); + if (bpmnModel == null || bpmnModel.getMainProcess() == null) { + return Collections.emptyList(); + } + + Process process = bpmnModel.getMainProcess(); + + // 找开始事件 + StartEvent startEvent = process.findFlowElementsOfType(StartEvent.class, false) + .stream() + .findFirst() + .orElse(null); + + if (startEvent == null) { + return Collections.emptyList(); + } + + // BFS 遍历 + List orderedNodes = new ArrayList<>(); + Queue queue = new LinkedList<>(); + Set visited = new HashSet<>(); + + queue.offer(startEvent); + visited.add(startEvent.getId()); + + while (!queue.isEmpty()) { + FlowNode current = queue.poll(); + orderedNodes.add(current); + + // 对 outgoingFlows 排序可选(保证并行分支顺序稳定) 
+ current.getOutgoingFlows().stream() + .sorted(Comparator.comparing(SequenceFlow::getId)) // 可选:提升顺序稳定性 + .forEach(flow -> { + String targetRef = flow.getTargetRef(); + FlowElement element = process.getFlowElement(targetRef); + if (element instanceof FlowNode && visited.add(targetRef)) { + queue.offer((FlowNode) element); + } + }); + } + + return orderedNodes; + } + + /** + * 从FlowNode构建NodeDetailInfo基础信息(不含状态) + */ + private NodeDetailInfo buildNodeDetailInfoFromFlowNode(FlowNode node) { + NodeDetailInfo detail = new NodeDetailInfo(); + detail.setId(node.getId()); + detail.setName(node.getName() != null ? node.getName() : ""); + detail.setType(node.getClass().getSimpleName()); + + // 后续节点 + detail.setNextNodeIds( + node.getOutgoingFlows().stream() + .map(SequenceFlow::getTargetRef) + .collect(Collectors.toList()) + ); + + // 扩展属性 + if (node.getExtensionElements() != null) { + List extList = node.getExtensionElements().get(FlowableConfig.EXECUTECONFIG); + if (extList != null && !extList.isEmpty()) { + detail.setExecuteConfig(extList.get(0).getElementText()); + } + } + + return detail; + } + + // --- 工具方法:格式化耗时(毫秒 → 可读字符串)--- + private String formatDuration(long millis) { + long seconds = millis / 1000; + long mins = seconds / 60; + long hours = mins / 60; + long days = hours / 24; + + if (days > 0) return String.format("%dd %dh %dm %ds", days, hours % 24, mins % 60, seconds % 60); + if (hours > 0) return String.format("%dh %dm %ds", hours, mins % 60, seconds % 60); + if (mins > 0) return String.format("%dm %ds", mins, seconds % 60); + return String.format("%ds", seconds); + } + + public SdmResponse> previewNodeInputFiles(PreviewNodeInputFilesReq req) { + log.info("复合文件预览请求: {}", req); + + List resultList = new ArrayList<>(); + + // ================= 1. 处理当前节点 (inputDirId, 需同步 MinIO) ================= + scanCurrentNodeInput(req.getProcessDefinitionId(),req.getRunId(), req.getNodeId(), resultList); + + // ================= 2. 
处理前置节点 (outputDirId, 本地直接扫描) ================= + if (ObjectUtils.isNotEmpty(req.getBeforeNodeId())) { + // 预编译正则 Map (Key: Category, Value: Pattern) + Map patternMap = new HashMap<>(); + if (req.getRegexConfig() != null) { + req.getRegexConfig().forEach((k, v) -> { + try { + patternMap.put(k, Pattern.compile(v)); + } catch (Exception e) { + log.error("正则编译失败: key={}, regex={}", k, v); + } + }); + } + + scanPreviousNodeOutput(req.getProcessDefinitionId(),req.getRunId(), req.getBeforeNodeId(), patternMap, resultList); + } + + return SdmResponse.success(resultList); + } + + /** + * 场景1:处理当前节点输入 + * 关注点:inputDirId + * 动作:必须从 MinIO 同步到本地,因为这是用户刚上传的 + */ + private void scanCurrentNodeInput(String processDefinitionId,String runId, String nodeId, List resultList) { + try { + // 1. 获取参数 + JSONObject nodeParams = processNodeParamService.getParam(processDefinitionId,nodeId,runId); + if (nodeParams == null || !nodeParams.containsKey("inputDirId")) { + log.warn("当前节点 {} 未配置 inputDirId,跳过手动上传区扫描", nodeId); + return; + } + Long inputDirId = nodeParams.getLong("inputDirId"); + + // 2. 
获取路径 + String objectKey = getObjectKeyByDirId(inputDirId); + if (objectKey == null) return; + + String absDirPath = FlowableConfig.FLOWABLE_SIMULATION_BASEDIR + objectKey; + + File dir = new File(absDirPath); + if (!dir.exists() || !dir.isDirectory()) return; + + File[] files = dir.listFiles(); + if (files == null) return; + + for (File file : files) { + if (file.isFile()) { + NodeInputFilePreviewResp dto = new NodeInputFilePreviewResp(); + dto.setFileName(file.getName()); + dto.setFileSize(String.valueOf(file.length())); + dto.setCreateTime(String.valueOf(file.lastModified())); + dto.setFilePath(file.getAbsolutePath()); + + // 来源标记 + dto.setSource("CURRENT"); + // 类别标记:统一标记为用户上传,不再强行匹配正则分类 + dto.setFileCategory("USER_UPLOAD"); + + resultList.add(dto); + } + } + + } catch (Exception e) { + log.error("扫描当前节点输入失败: nodeId={}", nodeId, e); + } + } + + + /** + * 场景2:处理前置节点输出 + * 关注点:outputDirId + * 动作:直接读本地 (假设大文件不走MinIO,或者上一步HPC已经落地了) + */ + private void scanPreviousNodeOutput(String processDefinitionId,String runId, String beforeNodeId, Map patternMap, List resultList) { + try { + // 1. 获取参数 + JSONObject nodeParams = processNodeParamService.getParam(processDefinitionId,beforeNodeId,runId); + if (nodeParams == null || !nodeParams.containsKey("outputDirId")) { + return; + } + Long outputDirId = nodeParams.getLong("outputDirId"); + + // 2. 获取路径 + String objectKey = getObjectKeyByDirId(outputDirId); + if (objectKey == null) return; + + String absDirPath = FlowableConfig.FLOWABLE_SIMULATION_BASEDIR + objectKey; + + // 3. 
【不同步】直接扫本地 + // 假设前置节点是 HPC 节点,结果已经写在共享存储上了 + File dir = new File(absDirPath); + if (!dir.exists() || !dir.isDirectory()) return; + + File[] files = dir.listFiles(); + if (files == null) return; + + for (File file : files) { + if (!file.isFile()) continue; + + String category = null; + // 必须匹配正则才能入选 + if (!patternMap.isEmpty()) { + for (Map.Entry entry : patternMap.entrySet()) { + if (entry.getValue().matcher(file.getName()).matches()) { + category = entry.getKey(); + break; + } + } + } + + // 如果匹配到了分类 (Master/Input 等),则添加 + if (category != null) { + NodeInputFilePreviewResp dto = new NodeInputFilePreviewResp(); + dto.setFileName(file.getName()); + dto.setFileSize(String.valueOf(file.length())); + dto.setCreateTime(String.valueOf(file.lastModified())); + dto.setFilePath(file.getAbsolutePath()); + + dto.setSource("PREVIOUS"); + dto.setFileCategory(category); + + resultList.add(dto); + } + // 没匹配到直接丢弃 (前置节点的无关文件) + } + + } catch (Exception e) { + log.error("扫描前置节点输出失败: beforeNodeId={}", beforeNodeId, e); + } + } + + /** + * 辅助:获取 ObjectKey + */ + private String getObjectKeyByDirId(Long dirId) { + GetFileBaseInfoReq fileReq = new GetFileBaseInfoReq(); + fileReq.setFileId(dirId); + SdmResponse fileResp = dataFeignClient.getFileBaseInfo(fileReq); + if (!fileResp.isSuccess() || fileResp.getData() == null) { + log.warn("查询文件夹信息失败: dirId={}", dirId); + return null; + } + return fileResp.getData().getObjectKey(); + } + + + public SdmResponse continueServiceTask(@RequestBody CompleteTaskReq req) { + log.info("开始继续服务任务处理, 请求参数: {}", req); + + String taskDefKey; + + // 根据类型确定真正的 taskDefinitionKey + if (FlowElementTypeEnums.fromString(req.getFlowelementType()).equals(FlowElementTypeEnums.SERVICETASK)) { + // 如果是 ServiceTask 前置等待节点 + taskDefKey = FlowNodeIdUtils.generateWaitUserTaskId(req.getTaskDefinitionKey()); + log.info("识别为ServiceTask前置等待节点, 原始任务定义Key: {}, 转换后任务定义Key: {}", req.getTaskDefinitionKey(), taskDefKey); + } else { + // 普通 UserTask + taskDefKey = 
req.getTaskDefinitionKey(); + log.info("识别为普通UserTask, 任务定义Key: {}", taskDefKey); + } + + Task task = taskService.createTaskQuery() + .processInstanceId(req.getProcessInstanceId()) + .taskDefinitionKey(taskDefKey) + .singleResult(); + + log.info("任务查询完成, 流程实例ID: {}, 任务定义Key: {}, 查询结果为空: {}", req.getProcessInstanceId(), taskDefKey, task == null); + + if (task == null) { + log.error("找不到任务!流程实例ID: {}, taskDefinitionKey={}", req.getProcessInstanceId(), taskDefKey); + throw new RuntimeException("找不到任务! taskDefinitionKey=" + taskDefKey); + } + + log.info("准备完成任务, 任务ID: {}, 变量数量: {}", task.getId(), + req.getVariables() != null ? req.getVariables().size() : 0); + + + try { + // 完成任务 + if (req.getVariables() != null) { + taskService.complete(task.getId(), req.getVariables()); + log.info("任务完成(带变量), 任务ID: {}", task.getId()); + } else { + taskService.complete(task.getId()); + log.info("任务完成(无变量), 任务ID: {}", task.getId()); + } + } catch (Exception e) { + log.error("完成任务时发生错误, 任务ID: {}, 异常信息: {}", task.getId(), e.getMessage()); + throw new RuntimeException("完成任务时发生错误, 错误信息: " + e.getMessage()); + } + + log.info("服务任务处理完成, 任务ID: {}", task.getId()); + return SdmResponse.success(); + } + + public SdmResponse asyncCallback(AsyncCallbackRequest request) { + log.info("接收到异步回调请求, 请求内容: {}", request); + // 发送信号唤醒流程实例中等待的节点 + universalDelegate.signalByTaskId(request); + log.info("异步回调处理转发完成, 任务ID: {}", request.getAsyncTaskId()); + return SdmResponse.success(); + } + + /** + * 查找特定节点的死信作业ID,用于后续重试操作。 + * 支持查找 原始节点 OR 哨兵节点(_check) 的死信作业 + * @param processInstanceId 流程实例ID + * @param activityId 节点ID + * @return + */ + private String findDeadJobId(String processInstanceId, String activityId) { + // 1. 先查出该实例下所有的死信作业 (通常不会很多) + List deadJobs = managementService.createDeadLetterJobQuery() + .processInstanceId(processInstanceId) + .list(); + + if (deadJobs.isEmpty()) { + throw new RuntimeException("未找到任何死信作业,请确认流程是否已结束或任务处于重试等待中"); + } + + // 2. 
查出该实例下所有的 Execution (为了获取 ActivityId 映射) + // 这样只需要查 2 次数据库,而不是 N+1 次 + List executions = runtimeService.createExecutionQuery() + .processInstanceId(processInstanceId) + .list(); + + // 3. 构建 ExecutionId -> ActivityId 的映射 + Map execActivityMap = executions.stream() + .filter(e -> e.getActivityId() != null) + .collect(Collectors.toMap(Execution::getId, Execution::getActivityId, (v1, v2) -> v1)); + + // 3. 预测可能的哨兵节点 ID + String checkNodeId = FlowNodeIdUtils.generateCheckTaskId(activityId); + + // 4. 在内存中匹配找到对应的 Job + return deadJobs.stream() + .filter(job -> { + String actId = execActivityMap.get(job.getExecutionId()); + // 既匹配原始ID,也匹配哨兵ID + return activityId.equals(actId) || checkNodeId.equals(actId); + }) + .findFirst() + .map(Job::getId) + .orElseThrow(() -> new RuntimeException("在死信队列中未找到节点 [" + activityId + "] 或其关联校验节点的作业")); + } + + public SdmResponse retryFailedNode( String processInstanceId,String failNodeId) { + try { + // 2. 查找 Job ID (参考上面的代码) + String jobId = findDeadJobId(processInstanceId, failNodeId); + // 3. 执行重试 + managementService.moveDeadLetterJobToExecutableJob(jobId, 1); + log.info("作业已恢复,等待异步执行器拾取执行..."); + return SdmResponse.success("重试任务已提交"); + } catch (Exception e) { + log.error("重试节点失败, 流程ID: {}, 节点ID: {}, 异常信息: {}", processInstanceId, failNodeId, e.getMessage(), e); + throw new RuntimeException("重试失败"); + } + } + + /** + * 任意节点跳转重试 (Rewind/Jump) + * 场景:节点失败(进死信)后,用户修改参数,跳转回任意前置节点重新跑。 + * + * @param procInstId 流程实例ID + * @param targetNodeId 目标逻辑节点ID (用户想跳去哪里,如 "TaskA") + * @param newVariables 新的参数 + */ + public SdmResponse retryToNode(String procInstId, String targetNodeId, Map newVariables) { + log.info("开始执行回退重试 (Rewind), 流程: {}, 目标: {}", procInstId, targetNodeId); + + // 1. 
获取当前流程实例中 **所有** 活跃/停滞的节点 + // 包含:正在运行的、报错进死信的、UserTask等待中的 + // 为什么要全拿?防止并行分支回退时产生幽灵分支。 + List allActiveActivityIds = getFailedActivityIds(procInstId); + + if (allActiveActivityIds.isEmpty()) { + return SdmResponse.failed("当前流程已结束或状态异常,无法执行跳转"); + } + + log.info("当前活跃节点集合: {},将全部收束至目标: {}", allActiveActivityIds, targetNodeId); + + // 2. 更新流程变量 + if (newVariables != null && !newVariables.isEmpty()) { + runtimeService.setVariables(procInstId, newVariables); + } + + // 3. 净室清理 (已移除,由 Delegate/Listener 自动处理) + + // 4. 执行全量跳转 + try { + runtimeService.createChangeActivityStateBuilder() + .processInstanceId(procInstId) + // 【关键修改】使用 moveActivityIdsToSingleActivityId + // 将所有分散的 Token 收束到一个目标点,解决并行分支回退问题 + .moveActivityIdsToSingleActivityId(allActiveActivityIds, targetNodeId) + .changeState(); + + log.info("跳转指令执行成功"); + } catch (Exception e) { + log.error("流程跳转失败", e); + // 常见错误:目标节点ID不存在,或者不能从子流程跳出等 + throw new RuntimeException("流程跳转失败: " + e.getMessage()); + } + + return SdmResponse.success("已执行回退重试,流程流转至: " + targetNodeId); + } + + /** + * 辅助方法:获取当前流程实例中,所有关联了死信作业的 ActivityId + */ + private List getFailedActivityIds(String procInstId) { + // 1. 查死信 Job + List deadJobs = managementService.createDeadLetterJobQuery() + .processInstanceId(procInstId) + .list(); + + if (deadJobs.isEmpty()) { + return Collections.emptyList(); + } + + // 2. 提取死信 Job 对应的 Execution ID 集合 + Set failedExecutionIds = deadJobs.stream() + .map(Job::getExecutionId) + .collect(Collectors.toSet()); + + // 3. 查出该流程实例下的所有 Execution + List allExecutions = runtimeService.createExecutionQuery() + .processInstanceId(procInstId) + .list(); + + // 4. 
内存匹配:找到 failedExecutionIds 对应的 ActivityId + return allExecutions.stream() + // 只保留那些有死信 Job 的 Execution + .filter(e -> failedExecutionIds.contains(e.getId())) + .map(Execution::getActivityId) + .filter(Objects::nonNull) // 过滤掉没有 ActivityId 的(如根 Execution) + .distinct() + .collect(Collectors.toList()); + } +} + + + +package com.sdm.flowable.service; + +import com.baomidou.mybatisplus.extension.service.IService; +import com.sdm.common.entity.req.flowable.AsyncCallbackRequest; +import com.sdm.flowable.entity.AsyncTaskRecord; +import org.flowable.engine.delegate.DelegateExecution; + +import java.util.Map; + +/** + *

+ * 异步任务执行记录表 服务类 + *

+ * + * @author author + * @since 2025-11-26 + */ +public interface IAsyncTaskRecordService extends IService { + /** + * 注册异步任务 + */ + String registerAsyncTask(DelegateExecution execution, String receiveTaskId, String handlerType, Map bizParams,String status,String asyncTaskId); + + /** + * 异步回调恢复流程 + */ + void completeAsyncTask(AsyncCallbackRequest request); + + /** + * 补偿流转:触发所有挂起的任务,用于流程重启激活时恢复ReceiveTask执行状态 + */ + void triggerPendingTasks(String processInstanceId); +} +
package com.sdm.flowable.service.impl;

import com.alibaba.excel.util.StringUtils;
import com.alibaba.fastjson2.JSON;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.sdm.common.entity.req.flowable.AsyncCallbackRequest;
import com.sdm.flowable.dao.AsyncTaskRecordMapper;
import com.sdm.flowable.entity.AsyncTaskRecord;
import com.sdm.flowable.enums.AsyncTaskStatusEnum;
import com.sdm.flowable.service.IAsyncTaskRecordService;
import lombok.extern.slf4j.Slf4j;
import org.flowable.engine.RuntimeService;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.runtime.Execution;
import org.flowable.engine.runtime.ProcessInstance;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.List;
import java.util.Map;

import static com.sdm.common.config.FlowableConfig.RECEIVETASK_CALLBACKE_MSG;
import static com.sdm.common.config.FlowableConfig.RECEIVETASK_CALLBACKE_STATUS;

/**
 * Service implementation for the async task execution record table.
 *
 * <p>Tracks the lifecycle of asynchronous work that a process waits on via a
 * ReceiveTask: registration before the wait, callback-driven completion, and
 * compensation of callbacks that arrived while the process was suspended.</p>
 *
 * @author author
 * @since 2025-11-26
 */
@Service
@Slf4j
public class AsyncTaskRecordServiceImpl extends ServiceImpl<AsyncTaskRecordMapper, AsyncTaskRecord>
        implements IAsyncTaskRecordService {

    @Autowired
    private RuntimeService runtimeService;

    /**
     * Registers an async task record so a later callback can locate the waiting
     * execution and resume the process.
     *
     * @param execution     current execution (supplies process instance id / execution id)
     * @param receiveTaskId activity id of the ReceiveTask the process will wait on
     * @param handlerType   business handler discriminator stored for the callback side
     * @param bizParams     business parameters, persisted as JSON for replay/debugging
     * @param status        initial status code (typically RUNNING)
     * @param asyncTaskId   externally generated correlation id, echoed back to the caller
     * @return the asyncTaskId that was persisted
     */
    public String registerAsyncTask(DelegateExecution execution, String receiveTaskId, String handlerType,
                                    Map<String, Object> bizParams, String status, String asyncTaskId) {
        AsyncTaskRecord record = new AsyncTaskRecord();
        record.setAsyncTaskId(asyncTaskId);
        record.setProcessInstanceId(execution.getProcessInstanceId());
        record.setExecutionId(execution.getId());
        record.setReceiveTaskId(receiveTaskId);
        record.setHandlerType(handlerType);
        record.setRequestJson(JSON.toJSONString(bizParams));
        record.setStatus(status);
        this.save(record);
        return asyncTaskId;
    }

    /**
     * Resumes the process from an async callback.
     *
     * <p>Runs in a transaction so the business table and the engine state stay
     * consistent. Three guard layers prevent "the task failed but the process
     * moved on" and "the process is suspended but the callback pushes it forward":
     * idempotency, process-lifecycle state, and execution-position validation.</p>
     *
     * @param request callback payload carrying the asyncTaskId, status and result JSON
     * @throws RuntimeException when no record matches, or the process is no longer
     *                          waiting at the expected ReceiveTask
     */
    @Transactional(rollbackFor = Exception.class)
    public void completeAsyncTask(AsyncCallbackRequest request) {
        // 1. Load the business record for this callback.
        AsyncTaskRecord record = this.lambdaQuery()
                .eq(AsyncTaskRecord::getAsyncTaskId, request.getAsyncTaskId())
                .one();
        if (record == null) {
            throw new RuntimeException("未找到对应的异步任务记录: " + request.getAsyncTaskId());
        }

        // [Guard 1: idempotency]
        // Network retries may deliver the callback twice; if the record is already
        // terminal, ignore the duplicate silently.
        if (AsyncTaskStatusEnum.SUCCESS.getCode().equals(record.getStatus())
                || AsyncTaskStatusEnum.FAIL.getCode().equals(record.getStatus())) {
            log.warn("任务 {} 已处理完毕,忽略重复回调", request.getAsyncTaskId());
            return;
        }

        // [Guard 2: process lifecycle]
        // Does the process instance still exist? Is it suspended?
        ProcessInstance processInstance = runtimeService.createProcessInstanceQuery()
                .processInstanceId(record.getProcessInstanceId())
                .singleResult();

        if (processInstance == null) {
            // The process may have been deleted by a user; archive the business
            // record without raising an error.
            log.warn("流程实例 {} 不存在或已结束,仅更新业务记录", record.getProcessInstanceId());
            record.setStatus(getRequestStatus(request));
            record.setResultJson(request.getResultJson());
            this.updateById(record);
            return;
        }

        if (processInstance.isSuspended()) {
            log.info("流程 {} 处于挂起状态,异步任务 {} 结果已暂存,等待激活时自动补偿流转。",
                    record.getProcessInstanceId(), record.getAsyncTaskId());
            // Persist the result only; the process is pushed forward later by
            // triggerPendingTasks when it is re-activated.
            record.setStatus(getRequestStatus(request));
            record.setResultJson(request.getResultJson());
            this.updateById(record);
            return;
        }

        // [Guard 3: execution position] The execution must still be parked on the
        // expected ReceiveTask.
        Execution exec = runtimeService.createExecutionQuery()
                .executionId(record.getExecutionId())
                .activityId(record.getReceiveTaskId())
                .singleResult();

        if (exec == null) {
            // Extreme case: the record says RUNNING but the engine is not at the
            // ReceiveTask (rolled back, or a concurrency issue). Fail loudly.
            log.error("流程节点状态不一致,当前流程不在等待节点: " + record.getReceiveTaskId());
            throw new RuntimeException("流程节点状态不一致,当前流程不在等待节点: " + record.getReceiveTaskId());
        }

        record.setStatus(getRequestStatus(request));
        record.setResultJson(request.getResultJson());
        this.updateById(record);

        // 4. Trigger the process. Local variables bind the outcome to this
        // execution only, so parallel branches cannot pollute each other.
        runtimeService.setVariableLocal(exec.getId(), RECEIVETASK_CALLBACKE_STATUS, record.getStatus());
        runtimeService.setVariableLocal(exec.getId(), RECEIVETASK_CALLBACKE_MSG, record.getResultJson());
        runtimeService.trigger(exec.getId());
    }

    /**
     * Resolves the status to persist from a callback request; a blank status is
     * treated as FAIL (missing status is considered an error outcome).
     */
    private String getRequestStatus(AsyncCallbackRequest request) {
        return StringUtils.isNotBlank(request.getStatus()) ? request.getStatus() : AsyncTaskStatusEnum.FAIL.getCode();
    }

    /**
     * Compensation pass: re-triggers any ReceiveTask whose business result arrived
     * while the process was suspended. Called when a process is re-activated.
     *
     * @param processInstanceId process instance to scan for backlogged callbacks
     */
    @Transactional(rollbackFor = Exception.class)
    public void triggerPendingTasks(String processInstanceId) {
        // 1. Find all async tasks of this instance whose business side is terminal.
        //    FIX: use the enum codes instead of hard-coded "SUCCESS"/"FAIL" literals
        //    so this filter stays consistent with the idempotency check in
        //    completeAsyncTask even if the enum codes change.
        List<AsyncTaskRecord> finishedRecords = this.lambdaQuery()
                .eq(AsyncTaskRecord::getProcessInstanceId, processInstanceId)
                .in(AsyncTaskRecord::getStatus,
                        AsyncTaskStatusEnum.SUCCESS.getCode(), AsyncTaskStatusEnum.FAIL.getCode())
                .list();

        if (finishedRecords.isEmpty()) {
            return;
        }

        log.info("开始检查流程 {} 的积压回调任务,共 {} 条", processInstanceId, finishedRecords.size());

        for (AsyncTaskRecord record : finishedRecords) {
            // 2. Is the engine still waiting at the corresponding ReceiveTask?
            Execution execution = runtimeService.createExecutionQuery()
                    .executionId(record.getExecutionId())
                    .activityId(record.getReceiveTaskId())
                    .singleResult();

            // 3. Compensate only when the business is done AND the engine still waits.
            if (execution != null) {
                log.info("发现积压任务 {} (Execution: {}),正在执行补偿流转...",
                        record.getAsyncTaskId(), record.getExecutionId());

                // The variables must be set here too, otherwise the downstream
                // delegate reads no status and NPEs or misjudges the outcome.
                runtimeService.setVariableLocal(execution.getId(), RECEIVETASK_CALLBACKE_STATUS, record.getStatus());
                runtimeService.setVariableLocal(execution.getId(), RECEIVETASK_CALLBACKE_MSG, record.getResultJson());
                runtimeService.trigger(execution.getId());
            }
        }
    }
}
package com.sdm.flowable.service.impl;

import com.alibaba.fastjson2.JSONObject;
import com.sdm.common.config.FlowableConfig;
import com.sdm.flowable.service.IProcessNodeParamService;
import com.sdm.flowable.util.FlowNodeIdUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.flowable.engine.delegate.DelegateExecution;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Node execution strategy (runtime strategy layer).
 *
 * <p>Looks up the node parameters by {@code runId + nodeId} at runtime and
 * decides whether the generated {@code _waitUser} task should be skipped
 * (auto mode) or kept (manual wait). Referenced from BPMN skip expressions
 * as {@code nodeExecutionStrategy}.</p>
 */
@Slf4j
@Component("nodeExecutionStrategy") // bean name referenced from the BPMN expression
public class NodeExecutionStrategy {

    @Autowired
    private IProcessNodeParamService processNodeParamService;

    /**
     * Decides whether to skip the {@code _waitUser} node.
     *
     * <p>Fails closed: any missing data or exception yields {@code false}
     * (manual wait) so an unconfigured or broken node pauses instead of
     * running away.</p>
     *
     * @param execution current execution context
     * @return {@code true} = skip (auto-execute), {@code false} = manual wait
     */
    public boolean shouldSkip(DelegateExecution execution) {
        try {
            log.info("执行策略判定:当前节点 {}", execution.getCurrentActivityId());
            // 1. Current activity id, e.g. "taskA_waitUser".
            String currentActivityId = execution.getCurrentActivityId();
            String processDefinitionId = execution.getProcessDefinitionId();

            // 2. Derive the original ServiceTask id (e.g. "taskA") — the
            //    parameters are stored on the ServiceTask, not on "_waitUser".
            String serviceNodeId = FlowNodeIdUtils.getOriginalNodeIdFromWaitUserTask(currentActivityId);
            if (serviceNodeId == null) {
                log.warn("节点 {} 命名不符合规范,无法解析原始ID,默认手动", currentActivityId);
                return false;
            }

            // 3. runId must be supplied as a process variable at start time.
            String runId = (String) execution.getVariable("runId");
            if (StringUtils.isBlank(runId)) {
                log.error("流程变量中缺失 runId,无法查询节点参数,强制转为手动模式。节点: {}", serviceNodeId);
                return false;
            }

            // 4. Load the ServiceTask parameters from process_node_param.
            JSONObject params = processNodeParamService.getParam(processDefinitionId, serviceNodeId, runId);
            log.info("是否手动控制节点 {} 参数: {}", serviceNodeId, params);

            // 5. Evaluate the configured execute mode.
            if (params != null && params.containsKey(FlowableConfig.EXECUTE_MODE_KEY)) {
                String mode = params.getString(FlowableConfig.EXECUTE_MODE_KEY);
                if (FlowableConfig.EXECUTE_MODE_AUTO.equalsIgnoreCase(mode)) {
                    log.info("策略判定:节点 {} 配置为 [自动执行] -> 跳过等待", serviceNodeId);
                    return true; // skip = auto
                }
            }

            // 6. Default: no config, or MANUAL → do not skip.
            log.info("策略判定:节点 {} 配置为 [手动/未配置] -> 进入等待状态", serviceNodeId);
            return false;

        } catch (Exception e) {
            // Fail closed: pause the process on errors rather than lose control.
            log.error("执行策略判定异常,降级为手动模式", e);
            return false;
        }
    }
}



package com.sdm.flowable.service.impl;

import com.alibaba.fastjson2.JSONObject;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.sdm.flowable.entity.ProcessNodeParam;
import com.sdm.flowable.dao.ProcessNodeParamMapper;
import com.sdm.flowable.service.IProcessNodeParamService;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang3.ObjectUtils;
import org.flowable.engine.RuntimeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Service implementation for the process node input parameter table.
 *
 * @author author
 * @since 2025-11-25
 */
@Service
public class ProcessNodeParamServiceImpl extends ServiceImpl<ProcessNodeParamMapper, ProcessNodeParam>
        implements IProcessNodeParamService {

    @Autowired
    private ObjectMapper objectMapper;

    // NOTE(review): currently unused in this class — confirm whether it can be removed.
    @Autowired
    private RuntimeService runtimeService;

    /**
     * Saves (upserts) node input parameters, keyed by process definition + node + run.
     *
     * @param processDefinitionId process definition id
     * @param nodeId              node id the parameters belong to
     * @param runId               run id (parameter-template scope)
     * @param params              parameter map, serialized to JSON for storage
     * @throws RuntimeException if the parameter map cannot be serialized
     */
    public void saveParamByProcessDefinitionId(String processDefinitionId, String nodeId, String runId,
                                               Map<String, Object> params) {
        ProcessNodeParam param = new ProcessNodeParam();
        param.setProcessDefinitionId(processDefinitionId);
        param.setNodeId(nodeId);
        param.setRunId(runId);
        try {
            param.setParamJson(objectMapper.writeValueAsString(params));
        } catch (JsonProcessingException e) {
            throw new RuntimeException("参数序列化失败", e);
        }
        // Update if a record for (runId, processDefinitionId, nodeId) exists, else insert.
        ProcessNodeParam existing = this.lambdaQuery()
                .eq(ProcessNodeParam::getRunId, runId)
                .eq(ProcessNodeParam::getProcessDefinitionId, processDefinitionId)
                .eq(ProcessNodeParam::getNodeId, nodeId)
                .one();
        if (existing != null) {
            param.setId(existing.getId());
            this.updateById(param);
        } else {
            this.save(param);
        }
    }

    /**
     * Stamps the process instance id onto all parameter rows of a run after the
     * process has started.
     *
     * <p>FIX: issues a single UPDATE instead of the previous SELECT-then-UPDATE
     * round trip — the pre-query was only used for an emptiness check, and an
     * UPDATE that matches zero rows is already a harmless no-op.</p>
     */
    public void updateNodeParamProcessInstanceId(String runId, String processDefinitionId, String processInstanceId) {
        this.lambdaUpdate()
                .eq(ProcessNodeParam::getRunId, runId)
                .eq(ProcessNodeParam::getProcessDefinitionId, processDefinitionId)
                .set(ProcessNodeParam::getProcessInstanceId, processInstanceId)
                .update();
    }

    /**
     * Loads node input parameters (called during process execution).
     *
     * @param processDefinitionId process definition id
     * @param nodeId              node id
     * @param runId               run id; when empty the condition is dropped
     *                            (NOTE(review): with an empty runId several rows may
     *                            match and {@code one()} can fail — confirm callers
     *                            always pass a runId)
     * @return the parameters as a JSONObject; an empty JSONObject (not null) when
     *         no parameters are configured
     */
    public JSONObject getParam(String processDefinitionId, String nodeId, String runId) {
        ProcessNodeParam param = this.lambdaQuery()
                .eq(ObjectUtils.isNotEmpty(runId), ProcessNodeParam::getRunId, runId)
                .eq(ProcessNodeParam::getProcessDefinitionId, processDefinitionId)
                .eq(ProcessNodeParam::getNodeId, nodeId)
                .one();
        if (param == null) {
            // No configured parameters is not an error: return an empty object.
            return new JSONObject();
        }
        return JSONObject.parseObject(param.getParamJson());
    }
}
package com.sdm.flowable.service.impl;

import com.sdm.flowable.enums.ProcessInstanceStateEnum;
import org.flowable.engine.HistoryService;
import org.flowable.engine.ManagementService;
import org.flowable.engine.RuntimeService;
import org.flowable.engine.history.HistoricProcessInstance;
import org.flowable.engine.runtime.ProcessInstance;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Derives a single, strict state for a process instance from the Flowable
 * runtime and history services.
 */
@Component
public class ProcessStateHelper {

    @Autowired private RuntimeService runtimeService;
    @Autowired private HistoryService historyService;
    @Autowired private ManagementService managementService;

    /**
     * Computes the current state of a process instance (strict state-machine logic).
     *
     * <p>Priority for live instances: dead-letter jobs → ERROR, suspension →
     * SUSPENDED, otherwise RUNNING. Finished instances are classified from
     * history: no delete reason → COMPLETED, otherwise CANCELLED (terminated).</p>
     *
     * @param processInstanceId id of the instance to classify
     * @return the derived state
     * @throws IllegalArgumentException when the instance exists in neither
     *                                  runtime nor history
     */
    public ProcessInstanceStateEnum determineProcessState(String processInstanceId) {
        // 1. Runtime lookup: present means the process is still alive.
        ProcessInstance runtimeInstance = runtimeService.createProcessInstanceQuery()
                .processInstanceId(processInstanceId).singleResult();

        if (runtimeInstance != null) {
            // --- live instance ---

            // A. Dead-letter jobs take precedence: the instance is in error.
            long deadLetterCount = managementService.createDeadLetterJobQuery()
                    .processInstanceId(processInstanceId).count();
            if (deadLetterCount > 0) {
                return ProcessInstanceStateEnum.ERROR;
            }

            // B. Suspension check.
            if (runtimeInstance.isSuspended()) {
                return ProcessInstanceStateEnum.SUSPENDED;
            }

            // C. Default: running.
            return ProcessInstanceStateEnum.RUNNING;
        } else {
            // --- finished instance: consult history ---
            HistoricProcessInstance history = historyService.createHistoricProcessInstanceQuery()
                    .processInstanceId(processInstanceId).singleResult();

            if (history == null) {
                // Unknown everywhere: treat as nonexistent.
                throw new IllegalArgumentException("流程实例不存在: " + processInstanceId);
            }

            if (history.getDeleteReason() == null) {
                return ProcessInstanceStateEnum.COMPLETED;
            } else {
                return ProcessInstanceStateEnum.CANCELLED; // corresponds to TERMINATED
            }
        }
    }
}



package com.sdm.flowable.service;

import com.alibaba.fastjson2.JSONObject;
import com.sdm.flowable.entity.ProcessNodeParam;
import com.baomidou.mybatisplus.extension.service.IService;

import java.util.Map;

/**
 * Service contract for the process node input parameter table.
 *
 * @author author
 * @since 2025-11-25
 */
public interface IProcessNodeParamService extends IService<ProcessNodeParam> {

    /**
     * Saves (upserts) the input parameters of a node for a given run.
     */
    void saveParamByProcessDefinitionId(String processDefinitionId, String nodeId, String runId,
                                        Map<String, Object> params);

    /**
     * Stamps the process instance id onto all parameter rows of a run after the
     * process has started.
     */
    void updateNodeParamProcessInstanceId(String runId, String processDefinitionId, String processInstanceId);

    /**
     * Loads node input parameters when entering the process execution page.
     *
     * @param processDefinitionId process definition id
     * @param nodeId              node id
     * @param runId               run id
     * @return the node's input parameters (never null; empty when unconfigured)
     */
    JSONObject getParam(String processDefinitionId, String nodeId, String runId);
}
+ + +package com.sdm.flowable.util; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.sdm.common.entity.flowable.dto.FlowElementDTO; +import com.sdm.common.entity.flowable.dto.ProcessDefinitionDTO; +import com.sdm.common.entity.flowable.executeConfig.BaseExecuteConfig; +import com.sdm.common.config.FlowableConfig; +import com.sdm.flowable.enums.FlowElementTypeEnums; +import lombok.extern.slf4j.Slf4j; +import org.flowable.bpmn.model.*; +import org.flowable.bpmn.model.Process; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.*; +import java.util.stream.Collectors; + +/** + * DTO → Flowable BpmnModel 映射工具类(核心) + */ +@Slf4j +@Component +public class Dto2BpmnConverter { + + @Autowired + private ObjectMapper objectMapper; + + /** + * 核心映射方法 + */ + public BpmnModel convert(ProcessDefinitionDTO dto) throws JsonProcessingException { + // 1. 初始化 Flowable 顶层对象 + BpmnModel bpmnModel = new BpmnModel(); + Process process = new Process(); + process.setId(dto.getProcess().getId()); // 流程ID=DTO的process.id + process.setName(dto.getProcess().getName()); // 流程名称=DTO的process.name + bpmnModel.addProcess(process); + + // 2. 分离 DTO 中的「节点」和「连线」 + List allElements = dto.getFlowElements(); + List nodeDtos = allElements.stream() + .filter(e -> !FlowElementTypeEnums.SEQUENCEFLOW.getType().equals(e.getType())) + .toList(); + List flowDtos = allElements.stream() + .filter(e -> FlowElementTypeEnums.SEQUENCEFLOW.getType().equals(e.getType())) + .collect(Collectors.toList()); + + // 3. 存储异步任务ReceiveTask 映射关系(原节点ID → ReceiveTask节点ID) + // 这里 Value 存的是 _wait 节点 ID,用于标识这个节点开启了异步链条 + Map asyncTaskMap = new HashMap<>(); // 异步任务映射(原节点→ReceiveTask节点) + // 3.1、存储等待用户输入任务映射关系(原节点ID → waitUserTask节点ID) + // 这里 Value 存的是 _waitUser 节点 ID,用于标识这个节点开启了等待用户操作 + Map waitUserTaskMap = new HashMap<>(); // 原节点ID → waitUserTask节点ID + + + // 4. 
存储并行网关映射关系(原节点ID → 网关ID) + Map splitGatewayMap = new HashMap<>(); // 拆分网关(原拆分节点→拆分网关) + Map joinGatewayMap = new HashMap<>(); // 汇总网关(原汇总节点→汇总网关) + + // 5. 先创建所有节点(实际节点+等待任务节点+网关节点) + for (FlowElementDTO nodeDto : nodeDtos) { + // 处理异步任务,创建 ReceiveTask(_wait) 和 ServiceTask(_check),放在创建实际节点createActualNode方法之前是为了构造asyncTaskMap,后面createActualNode的时候才能设置回调等待节点 + handleAsyncTasks(process, nodeDto, asyncTaskMap); + // 处理等待用户提交任务 + handleWaitUserTask(process, nodeDto, waitUserTaskMap); + // 创建实际节点 + createActualNode(process, nodeDto, asyncTaskMap); + // 处理并行网关,创建拆分和汇聚节点 + addRequiredGateways(process, nodeDto, flowDtos, joinGatewayMap, splitGatewayMap); + } + + // 添加通用重试任务节点 + addRetryTask(process); + + // 6. 创建连线 + createConnections(process, flowDtos, asyncTaskMap,waitUserTaskMap, joinGatewayMap, splitGatewayMap); + + validProcess(process); + + return bpmnModel; + } + + /** + * 验证流程元素的唯一性 + */ + private static void validProcess(Process process) { + Map counter = new HashMap<>(); + + for (FlowElement e : process.getFlowElements()) { + counter.put(e.getId(), counter.getOrDefault(e.getId(), 0) + 1); + } + + counter.forEach((id, cnt) -> { + if (cnt > 1) { + log.error("❌ 重复 ID 发现: {} 出现次数: {}", id, cnt); + } + }); + } + + /** + * 添加通用重试任务节点 + */ + private void addRetryTask(Process process) { + // 通用重试中转任务(无 incoming/outgoing) + UserTask retryInputTask = new UserTask(); + retryInputTask.setId(FlowNodeIdUtils.getRetryTaskId()); + retryInputTask.setName("通用重试任务"); + + // 添加 ExecutionListener + FlowableListener listener = new FlowableListener(); + listener.setEvent("end"); // "start"、"end"、"take" 等 + listener.setImplementationType(ImplementationType.IMPLEMENTATION_TYPE_DELEGATEEXPRESSION); + listener.setImplementation("${retryRedirectListener}"); + retryInputTask.getExecutionListeners().add(listener); + + process.addFlowElement(retryInputTask); + } + + /** + * 处理异步任务节点 + * 逻辑:Original -> Wait(Receive) -> Check(Service) -> Next + */ + private void handleAsyncTasks(Process process, 
FlowElementDTO nodeDto, Map asyncTaskMap) { + // 检查节点是否为服务任务或用户任务且标记为异步回调 + if (isAsyncCallbackEnabled(nodeDto)) { + String originalNodeId = nodeDto.getId(); + String waitNodeId = FlowNodeIdUtils.generateAsyncTaskId(originalNodeId);// originalNodeId_wait + String checkNodeId = FlowNodeIdUtils.generateCheckTaskId(originalNodeId); // originalNodeId_check + + // 1. 创建 ReceiveTask (_wait) + ReceiveTask receiveTask = new ReceiveTask(); + receiveTask.setAsynchronous(true); + disableAsyncRetry(receiveTask); + receiveTask.setId(waitNodeId); + receiveTask.setName(nodeDto.getName() + "等待结果"); + process.addFlowElement(receiveTask); + log.info("创建完成 ReceiveTask异步任务等待节点:原始ID={}, 等待结果节点ID={}", originalNodeId, waitNodeId); + + // 2. 创建哨兵节点 ServiceTask (_check) + ServiceTask checkTask = new ServiceTask(); + checkTask.setId(checkNodeId); + checkTask.setName(nodeDto.getName() + "结果校验"); + checkTask.setAsynchronous(true); // 必须异步,产生 Job 用于报错 + // 设置快速失败(1次重试,失败即死信),保证强一致性 + disableAsyncRetry(receiveTask); + // 绑定校验 Delegate + checkTask.setImplementation("${asyncResultCheckDelegate}"); + checkTask.setImplementationType(ImplementationType.IMPLEMENTATION_TYPE_DELEGATEEXPRESSION); + process.addFlowElement(checkTask); + log.info("创建完成 哨兵节点ServiceTask异步任务结果校验节点:原始ID={}, 校验结果节点ID={}", originalNodeId, checkNodeId); + + // 3. 
内部连线:Wait -> Check + // 这是一条隐形的内部连线,永远固定 + process.addFlowElement(createSequenceFlow(waitNodeId, checkNodeId, null)); + log.info("异步任务链构建:originalNodeId:{} -> waitNodeId:{} -> checkNodeId:{}", originalNodeId,waitNodeId, checkNodeId); + + // 记录映射关系 + asyncTaskMap.put(originalNodeId, waitNodeId); + } + } + + private boolean isAsyncCallbackEnabled(FlowElementDTO nodeDto) { + return (FlowElementTypeEnums.SERVICETASK.getType().equals(nodeDto.getType()) || + FlowElementTypeEnums.USERTASK.getType().equals(nodeDto.getType())) && + nodeDto.getExtensionElements() != null && + nodeDto.getExtensionElements().getExecuteConfig() != null && + nodeDto.getExtensionElements().getExecuteConfig().isAsyncCallback(); + } + + private boolean isWaitUserEnabled(FlowElementDTO nodeDto) { + return FlowElementTypeEnums.SERVICETASK.getType().equals(nodeDto.getType()) && + nodeDto.getExtensionElements() != null && + nodeDto.getExtensionElements().getExecuteConfig() != null && + nodeDto.getExtensionElements().getExecuteConfig().isWaitUser(); + } + + private void handleWaitUserTask(Process process, FlowElementDTO nodeDto, Map waitUserTaskMap) { + // 只有当前节点是ServiceTask才需要判断是否等待用户输入,需要才创建前置UserTask + if (FlowElementTypeEnums.SERVICETASK.getType().equals(nodeDto.getType())) { + String originalNodeId = nodeDto.getId(); + String waitUserId = FlowNodeIdUtils.generateWaitUserTaskId(originalNodeId); + + UserTask waitUserTask = new UserTask(); + waitUserTask.setAsynchronous(true); + disableAsyncRetry(waitUserTask); + waitUserTask.setId(waitUserId); + waitUserTask.setName(nodeDto.getName() + "等待用户提交"); + + // 绑定 SkipExpression 到策略 Bean,运行时引擎会调用 nodeExecutionStrategy.shouldSkip(execution) + waitUserTask.setSkipExpression("${nodeExecutionStrategy.shouldSkip(execution)}"); + + // 不设置assignee,让任何人可以处理 + process.addFlowElement(waitUserTask); + + log.info("等待用户输入任务处理:原始ID={}, 等待用户提交节点ID={}", originalNodeId, waitUserId); + // 记录映射 + waitUserTaskMap.put(originalNodeId, waitUserId); + } + } + + /** + * 
添加必要的网关(并行拆分网关和并行汇聚网关) + */ + private void addRequiredGateways(Process process, FlowElementDTO nodeDto, List flowDtos, + Map joinGatewayMap, Map splitGatewayMap) { + String nodeId = nodeDto.getId(); + + // 计算入度和出度 + long incomingCount = flowDtos.stream().filter(f -> f.getTargetRef().equals(nodeId)).count(); + long outgoingCount = flowDtos.stream().filter(f -> f.getSourceRef().equals(nodeId)).count(); + + // 检查是否需要添加汇聚网关(入度>1) + if (incomingCount > 1) { + // 如果入度>1,则在节点前插入汇聚网关 + String joinGatewayId = FlowNodeIdUtils.generateJoinGatewayId(nodeId); + ParallelGateway joinGateway = new ParallelGateway(); + joinGateway.setId(joinGatewayId); + joinGateway.setName("并行汇聚-" + nodeDto.getName()); + process.addFlowElement(joinGateway); + joinGatewayMap.put(nodeId, joinGatewayId); + log.info("添加并行汇聚网关:节点ID={}, 汇聚网关ID={}", nodeId, joinGatewayId); + } + + // 检查是否需要添加拆分网关(出度>1) + if (outgoingCount > 1) { + // 如果出度>1,则在节点后插入拆分网关 + String splitGatewayId = FlowNodeIdUtils.generateSplitGatewayId(nodeId); + ParallelGateway splitGateway = new ParallelGateway(); + splitGateway.setId(splitGatewayId); + splitGateway.setName("并行拆分-" + nodeDto.getName()); + process.addFlowElement(splitGateway); + splitGatewayMap.put(nodeId, splitGatewayId); + log.info("添加并行拆分网关:节点ID={}, 拆分网关ID={}", nodeId, splitGatewayId); + } + } + + /** + * 全新的连线构造逻辑 + */ + private void createConnections(Process process, + List flowDtos, + Map asyncTaskMap, + Map waitUserTaskMap, + Map joinGatewayMap, + Map splitGatewayMap) { + + // ==================================================================================== + // ① 第一阶段:先把所有 DTO 原始连线直接画出来(不考虑网关和等待节点) + // ==================================================================================== + createInitialConnections(process, flowDtos); + + // ==================================================================================== + // ② 第二阶段:网关处理(删除原始连线 → 使用网关替代) + // ==================================================================================== + 
handleGatewayConnections(process, flowDtos, joinGatewayMap, splitGatewayMap); + + // ==================================================================================== + // ③ 第三阶段:处理异步任务(等待节点) + // (Original -> Wait -> Check -> Next) + // ==================================================================================== + handleAsyncTaskConnections(process, asyncTaskMap); + + // ==================================================================================== + // ④ 第三阶段:处理等待用户提交任务 + // (Prev -> WaitUser -> Original) + // ==================================================================================== + handleWaitUserTaskConnections(process, waitUserTaskMap); + } + + /** + * 第一阶段:创建初始连接 + * 遍历所有流程元素DTO,根据源引用和目标引用创建基本的顺序流连接 + */ + private void createInitialConnections(Process process, List flowDtos) { + for (FlowElementDTO flowDto : flowDtos) { + String source = flowDto.getSourceRef(); + String target = flowDto.getTargetRef(); + + SequenceFlow flow = createSequenceFlow(source, target, flowDto.getName()); + process.addFlowElement(flow); + } + } + + /** + * 第二阶段:处理网关连接 + * 包括删除需要被网关替代的原始连线以及重建涉及汇聚网关和拆分网关的连接关系 + */ + private void handleGatewayConnections(Process process, + List flowDtos, + Map joinGatewayMap, + Map splitGatewayMap) { + // Step 2.1 删除所有不该存在的连线(因为网关将替代) + List toRemove = new ArrayList<>(); + + for (FlowElement ele : process.getFlowElements()) { + if (ele instanceof SequenceFlow) { + SequenceFlow sf = (SequenceFlow) ele; + + // 目标节点有汇聚网关 → 删除原连线 + if (joinGatewayMap.containsKey(sf.getTargetRef())) { + toRemove.add(sf); + } + + // 源节点有拆分网关 → 删除原连线 + if (splitGatewayMap.containsKey(sf.getSourceRef())) { + toRemove.add(sf); + } + } + } + toRemove.forEach(e -> process.removeFlowElement(e.getId())); + + + // Step 2.2 重建汇聚网关连线(incoming:所有入线 → joinGW → 原节点) + for (String nodeId : joinGatewayMap.keySet()) { + + String joinGW = joinGatewayMap.get(nodeId); + + // 1. 
所有"指向 nodeId 的原始入度" → joinGW + List incomingSources = flowDtos.stream() + .filter(f -> f.getTargetRef().equals(nodeId)) + .map(FlowElementDTO::getSourceRef) + .toList(); + + for (String src : incomingSources) { + process.addFlowElement(createSequenceFlow(src, joinGW, null)); + } + + // 2. joinGW → nodeId + process.addFlowElement(createSequenceFlow(joinGW, nodeId, null)); + } + + + // Step 2.3 重建拆分网关连线(node → splitGW → 所有原本出度) + for (String nodeId : splitGatewayMap.keySet()) { + + String splitGW = splitGatewayMap.get(nodeId); + + // 1. nodeId → splitGW + process.addFlowElement(createSequenceFlow(nodeId, splitGW, null)); + + // 2. splitGW → 所有 target + List outgoingTargets = flowDtos.stream() + .filter(f -> f.getSourceRef().equals(nodeId)) + .map(FlowElementDTO::getTargetRef) + .toList(); + + for (String target : outgoingTargets) { + process.addFlowElement(createSequenceFlow(splitGW, target, null)); + } + } + } + + /** + * 第三阶段:处理异步任务连接 + * 针对已标记为异步回调的任务节点,将其连接重构为' Original -> Wait -> Check -> Targets(NextNodes)'的模式 + */ + private void handleAsyncTaskConnections(Process process, Map asyncTaskMap) { + for (String originalNodeId : asyncTaskMap.keySet()) { + + String waitNodeId = asyncTaskMap.get(originalNodeId); + + // _check 节点 ID (通过命名规则推算) + String checkNodeId = FlowNodeIdUtils.generateCheckTaskId(originalNodeId); + + // Step 3.1 找出所有"原节点 → target"的连线,并删除 + List removeLines = new ArrayList<>(); + List targets = new ArrayList<>(); + + for (FlowElement ele : process.getFlowElements()) { + if (ele instanceof SequenceFlow) { + SequenceFlow sf = (SequenceFlow) ele; + if (sf.getSourceRef().equals(originalNodeId)) { + targets.add(sf.getTargetRef()); + removeLines.add(sf); + } + } + } + + removeLines.forEach(f -> process.removeFlowElement(f.getId())); + + + // Step 3.2 添加:original → wait + process.addFlowElement(createSequenceFlow(originalNodeId, waitNodeId, null)); + // 注:Wait -> Check 的连线已经在 handleAsyncTasks 中内部添加了,这里不需要加 + // Step 3.3 添加:Check -> Targets (原本的下游节点) + for 
(String target : targets) { + process.addFlowElement(createSequenceFlow(checkNodeId, target, null)); + } + } + } + + private void handleWaitUserTaskConnections(Process process, Map waitUserTaskMap) { + for (String originalNodeId : waitUserTaskMap.keySet()) { + String waitUserId = waitUserTaskMap.get(originalNodeId); + + // Step 1: 找出原节点的所有入线,改为指向 waitUserTask + List removeLines = new ArrayList<>(); + List originalSources = new ArrayList<>(); + + for (FlowElement ele : process.getFlowElements()) { + if (ele instanceof SequenceFlow sf) { + if (sf.getTargetRef().equals(originalNodeId)) { + originalSources.add(sf.getSourceRef()); + removeLines.add(sf); + } + } + } + removeLines.forEach(f -> process.removeFlowElement(f.getId())); + + // Step 2: 添加原来的入线 → waitUserTask + for (String src : originalSources) { + process.addFlowElement(createSequenceFlow(src, waitUserId, null)); + } + + // Step 3: waitUserTask → 原节点 + process.addFlowElement(createSequenceFlow(waitUserId, originalNodeId, null)); + } + } + + + /** + * 创建实际的流程节点 + */ + private void createActualNode(Process process, FlowElementDTO nodeDto, Map asyncTaskMap) throws JsonProcessingException { + FlowElementTypeEnums elementType = FlowElementTypeEnums.fromString(nodeDto.getType()); + + switch (elementType) { + case STARTEVENT: + // 开始事件:直接映射 + StartEvent startEvent = new StartEvent(); + // 【关键修改】设置异步:确保 startProcessInstanceById 接口立刻返回 ID + startEvent.setAsynchronous(true); + startEvent.setId(nodeDto.getId()); + startEvent.setName(nodeDto.getName()); + + // 2. 
添加执行监听器:自动设置 skip 开关变量 + FlowableListener skipEnableListener = new FlowableListener(); + skipEnableListener.setEvent("start"); // 在开始事件启动时触发 + skipEnableListener.setImplementationType(ImplementationType.IMPLEMENTATION_TYPE_EXPRESSION); + // 直接使用表达式设置变量,不需要额外写 Java 类,主要是为了让 SkipExpression能生效 + skipEnableListener.setImplementation("${execution.setVariable('_FLOWABLE_SKIP_EXPRESSION_ENABLED', true)}"); + startEvent.getExecutionListeners().add(skipEnableListener); + + process.addFlowElement(startEvent); + log.info("创建开始事件节点 nodeId:{}", nodeDto.getId()); + break; + + case ENDEVENT: + // 结束事件:直接映射 + EndEvent endEvent = new EndEvent(); + endEvent.setId(nodeDto.getId()); + endEvent.setName(nodeDto.getName()); + process.addFlowElement(endEvent); + log.info("创建结束事件节点 nodeId:{}", nodeDto.getId()); + break; + + case USERTASK: + createUserTask(process, nodeDto, asyncTaskMap); + break; + + case SERVICETASK: + createServiceTask(process, nodeDto, asyncTaskMap); + break; + + default: + // 对于未支持的类型,可以选择抛出异常或者忽略 + throw new IllegalArgumentException("Unsupported element type: " + nodeDto.getType()); + } + } + + private void createUserTask(Process process, FlowElementDTO nodeDto, Map asyncTaskMap) throws JsonProcessingException { + // 用户任务:映射为 Flowable UserTask + UserTask userTask = new UserTask(); + // 【关键修改】设置异步:防止下方绑定的监听器(创建文件夹)报错导致前一个节点回滚 + // 这会创建一个 Job 来执行 UserTask 的初始化逻辑(包括执行监听器) + userTask.setAsynchronous(true); + disableAsyncRetry(userTask); + userTask.setId(nodeDto.getId()); + userTask.setName(nodeDto.getName()); + log.info("创建用户任务节点 nodeId:{}", nodeDto.getId()); + // 绑定控制参数(和 ServiceTask 类似) + if (nodeDto.getExtensionElements() != null && nodeDto.getExtensionElements().getExecuteConfig() != null) { + BaseExecuteConfig userTaskExecuteConfig = nodeDto.getExtensionElements().getExecuteConfig(); + // 设置异步回调节点ID + userTaskExecuteConfig.setCallbackNodeId(asyncTaskMap.getOrDefault(nodeDto.getId(), null)); + + String configJson = 
objectMapper.writeValueAsString(userTaskExecuteConfig); + log.info("用户任务userTask的executeConfig配置:{}", configJson); + ExtensionElement extensionElement = createFlowableElement( + FlowableConfig.EXECUTECONFIG, configJson); + userTask.getExtensionElements() + .computeIfAbsent(FlowableConfig.EXECUTECONFIG, k -> new ArrayList<>()) + .add(extensionElement); + + } + // 设置用户任务的属性,使其可以被任何人处理 + // 不设置 assignee 或 candidateUsers,这样任何人都可以处理任务 + + // 可选:绑定 TaskListener,在任务完成时触发逻辑 + FlowableListener dirPrepareListener = new FlowableListener(); + dirPrepareListener.setEvent("start"); + dirPrepareListener.setImplementationType(ImplementationType.IMPLEMENTATION_TYPE_DELEGATEEXPRESSION); + dirPrepareListener.setImplementation("${userTaskDirectoryPreparationListener}"); + userTask.getExecutionListeners().add(dirPrepareListener); + + process.addFlowElement(userTask); + } + + private void createServiceTask(Process process, FlowElementDTO nodeDto, Map asyncTaskMap) throws JsonProcessingException { + // 服务任务:映射为 Flowable ServiceTask,绑定自定义执行器 + ServiceTask serviceTask = new ServiceTask(); + // 【关键修改】设置异步:实现业务逻辑故障隔离,避免阻塞和连环回滚 + serviceTask.setAsynchronous(true); + disableAsyncRetry(serviceTask); + serviceTask.setId(nodeDto.getId()); + serviceTask.setName(nodeDto.getName()); + // 绑定执行器(Bean名称:customTaskExecutor) + serviceTask.setImplementation("${universalDelegate}"); + serviceTask.setImplementationType(ImplementationType.IMPLEMENTATION_TYPE_DELEGATEEXPRESSION); + log.info("创建服务任务节点 nodeId:{}", nodeDto.getId()); + if (nodeDto.getExtensionElements() != null && nodeDto.getExtensionElements().getExecuteConfig() != null) { + // 添加 Flowable 扩展属性 + BaseExecuteConfig serviceTaskExecuteConfig = nodeDto.getExtensionElements().getExecuteConfig(); + // 设置异步回调节点ID + serviceTaskExecuteConfig.setCallbackNodeId(asyncTaskMap.getOrDefault(nodeDto.getId(), null)); + + String configJson = objectMapper.writeValueAsString(serviceTaskExecuteConfig); + log.info("服务任务serviceTask的executeConfig配置:{}", configJson); + 
ExtensionElement extensionElement = createFlowableElement(FlowableConfig.EXECUTECONFIG, configJson); + serviceTask.getExtensionElements().computeIfAbsent(FlowableConfig.EXECUTECONFIG, k -> new ArrayList<>()) + .add(extensionElement); + + } + + process.addFlowElement(serviceTask); + } + + private ExtensionElement createFlowableElement(String name, String value) { + ExtensionElement element = new ExtensionElement(); + element.setName(name); + element.setNamespace("http://flowable.org/bpmn"); + element.setNamespacePrefix("flowable"); + element.setElementText(value); + return element; + } + + /** + * 辅助方法:创建 Flowable 连线(SequenceFlow) + */ + private SequenceFlow createSequenceFlow(String sourceRef, String targetRef, String name) { + SequenceFlow flow = new SequenceFlow(); + // 确保生成的ID符合NCName规范(以字母开头) + flow.setId("s" + UUID.randomUUID().toString().replace("-", "")); + flow.setSourceRef(sourceRef); // 源节点ID + flow.setTargetRef(targetRef); // 目标节点ID + if (name != null) { + flow.setName(name); + } + return flow; + } + + /** + * 配置节点失败后不重试(R0/PT0S) + * @param flowElement 需要配置的节点 + */ + private void disableAsyncRetry(FlowElement flowElement) { + ExtensionElement retryElement = new ExtensionElement(); + // 必须加上 flowable 的命名空间前缀 + retryElement.setNamespace("http://flowable.org/bpmn"); + retryElement.setNamespacePrefix("flowable"); + retryElement.setName("failedJobRetryTimeCycle"); + // R0 代表重试 0 次,即失败立刻停止 + retryElement.setElementText("R0/PT0S"); + + flowElement.addExtensionElement(retryElement); + } +} + + + +package com.sdm.flowable.util; + +import com.sdm.common.config.FlowableConfig; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.io.FileUtils; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +@Slf4j +public class FlowNodeIdUtils { + private static final String JOIN_GATEWAY_PREFIX = FlowableConfig.JOIN_GATEWAY_PREFIX; + private static final String SPLIT_GATEWAY_PREFIX = FlowableConfig.SPLIT_GATEWAY_PREFIX; + private 
static final String ASYNC_TASK_SUFFIX = FlowableConfig.ASYNC_TASK_SUFFIX; + private static final String WAIT_USER_SUFFIX = FlowableConfig.WAIT_USER_SUFFIX; + private static final String CHECK_SUFFIX = FlowableConfig.CHECK_SUFFIX; // 后置哨兵 + + // ==================== 网关 ==================== + + public static String generateJoinGatewayId(String nodeId) { + return JOIN_GATEWAY_PREFIX + nodeId; + } + + public static String generateSplitGatewayId(String nodeId) { + return SPLIT_GATEWAY_PREFIX + nodeId; + } + + public static boolean isJoinGateway(String id) { + return id != null && id.startsWith(JOIN_GATEWAY_PREFIX); + } + + public static boolean isSplitGateway(String id) { + return id != null && id.startsWith(SPLIT_GATEWAY_PREFIX); + } + + // ==================== 异步接收任务 ==================== + + public static String generateAsyncTaskId(String nodeId) { + return nodeId + ASYNC_TASK_SUFFIX; + } + + public static boolean isAsyncTask(String id) { + return id != null && id.endsWith(ASYNC_TASK_SUFFIX); + } + + public static String getOriginalNodeIdFromAsyncTask(String asyncTaskId) { + if (!isAsyncTask(asyncTaskId)) { + throw new IllegalArgumentException("不是异步等待节点: " + asyncTaskId); + } + return asyncTaskId.substring(0, asyncTaskId.length() - ASYNC_TASK_SUFFIX.length()); + } + + // ==================== 用户等待任务 ==================== + + public static String generateWaitUserTaskId(String nodeId) { + return nodeId + WAIT_USER_SUFFIX; + } + + public static boolean isWaitUserTask(String id) { + return id != null && id.endsWith(WAIT_USER_SUFFIX); + } + + public static String getOriginalNodeIdFromWaitUserTask(String waitUserTaskId) { + if (!isWaitUserTask(waitUserTaskId)) { + throw new IllegalArgumentException("不是隐藏等待节点: " + waitUserTaskId); + } + return waitUserTaskId.substring(0, waitUserTaskId.length() - WAIT_USER_SUFFIX.length()); + } + // ===================== 后置哨兵 ==================== + public static String generateCheckTaskId(String nodeId) { + return nodeId + CHECK_SUFFIX; + } + 
+ public static boolean isCheckTask(String id) { + return id != null && id.endsWith(CHECK_SUFFIX); + } + + public static String getOriginalNodeIdFromCheckTask(String checkTaskId) { + if (!isCheckTask(checkTaskId)) { + throw new IllegalArgumentException("不是后置哨兵节点: " + checkTaskId); + } + return checkTaskId.substring(0, checkTaskId.length() - CHECK_SUFFIX.length()); + } + + // --- 解析器 (反向查找原始ID) --- + + /** + * 如果传入的是辅助节点ID,返回原始ID;如果是原始ID,返回本身 + */ + public static String parseOriginalNodeId(String nodeId) { + if (nodeId == null) return null; + if (nodeId.endsWith(WAIT_USER_SUFFIX)) { + return nodeId.substring(0, nodeId.length() - WAIT_USER_SUFFIX.length()); + } + if (nodeId.endsWith(ASYNC_TASK_SUFFIX)) { + return nodeId.substring(0, nodeId.length() - ASYNC_TASK_SUFFIX.length()); + } + if (nodeId.endsWith(CHECK_SUFFIX)) { + return nodeId.substring(0, nodeId.length() - CHECK_SUFFIX.length()); + } + return nodeId; + } + // ==================== 重试任务 ==================== + + public static String getRetryTaskId() { + return FlowableConfig.RETRY_TASK_ID; + } + + /** + * 准备本地目录:如果目录已存在,则清空其内容;否则创建新目录。 + * 流程实例启动后,需要在本地准备一个目录,用于存储节点计算结果。 + * 如果同一个流程二次启动,每个节点会使用同一个文件夹,二次启动的时候, + * 如果清空,上一次流程实例运行结果相关文件也会在这个文件夹中,影响这次运行流程的结果文件 + * + * @param objectKey MinIO 的对象路径,将作为本地目录路径的一部分 + */ + public static void prepareLocalDir(String objectKey) { + String simulationBaseDir = FlowableConfig.FLOWABLE_SIMULATION_BASEDIR; + Path localBaseDir = Paths.get(simulationBaseDir).toAbsolutePath().normalize(); + Path fullLocalPath = localBaseDir.resolve(objectKey).normalize(); + + // 安全校验:防止路径穿越 + if (!fullLocalPath.startsWith(localBaseDir)) { + throw new RuntimeException("非法文件夹路径,可能包含路径穿越: " + objectKey); + } + + try { + if (Files.exists(fullLocalPath)) { + //直接删除整个目录 + log.info("本地目录已存在,将删除并重新创建: {}", fullLocalPath); + FileUtils.deleteDirectory(fullLocalPath.toFile()); + } + log.info("创建本地目录: {}", fullLocalPath); + Files.createDirectories(fullLocalPath); + } catch (Exception e) { + throw new 
RuntimeException("无法准备本地目录: " + fullLocalPath, e); + } + } + + + /** + * 判断是否是原始业务节点(即不是任何辅助节点) + * 用于过滤掉不需要给前端展示的中间节点 + */ + public static boolean isOriginalNode(String nodeId) { + if (nodeId == null) return false; + return !nodeId.endsWith(WAIT_USER_SUFFIX) && + !nodeId.endsWith(ASYNC_TASK_SUFFIX) && + !nodeId.endsWith(CHECK_SUFFIX) && + !nodeId.startsWith(JOIN_GATEWAY_PREFIX) && // 过滤网关 + !nodeId.startsWith(SPLIT_GATEWAY_PREFIX); + } +} + + + +server: + port: 7106 +spring: + application: + name: flowable + datasource: + url: jdbc:mysql://192.168.190.161:3306/flowable?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai + username: root + password: mysql + driver-class-name: com.mysql.cj.jdbc.Driver + flowable: + # ????????? + database-schema-update: true + # ??????JOB + async-executor-activate: true + cloud: + nacos: + discovery: + server-addr: 192.168.190.161:8848 + group: DEV_GROUP + enabled: true + +logging: + level: + org: + flowable: INFO + +mybatis-plus: + mapper-locations: classpath*:/mapper/**/*.xml + type-aliases-package: com.sdm.flowable.model.entity + configuration: + map-underscore-to-camel-case: true + global-config: + db-config: + id-type: auto +security: + whitelist: + paths: + - /process/testHpc + - /process/asyncCallback + + + +server: + port: 7106 +spring: + application: + name: flowable + datasource: + url: jdbc:mysql://192.168.65.161:3306/flowable?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai + username: root + password: mysql + driver-class-name: com.mysql.cj.jdbc.Driver + flowable: + # ????????? 
+ database-schema-update: true + # ??????JOB + async-executor-activate: true + cloud: + nacos: + discovery: + server-addr: 192.168.65.161:8848 + group: DEV_GROUP + enabled: true + +logging: + level: + org: + flowable: INFO + +mybatis-plus: + mapper-locations: classpath*:/mapper/**/*.xml + type-aliases-package: com.sdm.flowable.model.entity + configuration: + map-underscore-to-camel-case: true + global-config: + db-config: + id-type: auto +security: + whitelist: + paths: + - /process/testHpc + - /process/asyncCallback + + + +server: + port: 7106 +spring: + application: + name: flowable + datasource: + url: jdbc:mysql://192.168.65.161:3306/flowable?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai + username: root + password: mysql + driver-class-name: com.mysql.cj.jdbc.Driver + flowable: + # ????????? + database-schema-update: true + # ??????JOB + async-executor-activate: true + cloud: + nacos: + discovery: + server-addr: 192.168.65.161:8848 + group: LOCAL_GROUP + enabled: true + +logging: + level: + org: + flowable: INFO + +mybatis-plus: + mapper-locations: classpath*:/mapper/**/*.xml + type-aliases-package: com.sdm.flowable.model.entity + configuration: + map-underscore-to-camel-case: true + global-config: + db-config: + id-type: auto + +security: + whitelist: + paths: + - /process/testHpc + - /process/asyncCallback + + + +server: + port: 7106 +spring: + application: + name: flowable + datasource: + url: jdbc:mysql://192.168.30.146:3306/flowable?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai + username: root + password: mysql + driver-class-name: com.mysql.cj.jdbc.Driver + flowable: + # ????????? 
+ database-schema-update: true + # ??????JOB + async-executor-activate: true + cloud: + nacos: + discovery: + server-addr: 192.168.30.146:8848 + group: LYRIC_GROUP + enabled: true + +logging: + level: + org: + flowable: INFO + +mybatis-plus: + mapper-locations: classpath*:/mapper/**/*.xml + type-aliases-package: com.sdm.flowable.model.entity + configuration: + map-underscore-to-camel-case: true + global-config: + db-config: + id-type: auto + +security: + whitelist: + paths: + - /process/testHpc + - /process/asyncCallback + + + +server: + port: 7106 +spring: + application: + name: flowable + datasource: + url: jdbc:mysql://192.168.30.146:3306/flowable?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai + username: root + password: mysql + driver-class-name: com.mysql.cj.jdbc.Driver + flowable: + # ????????? + database-schema-update: true + # ??????JOB + async-executor-activate: true + cloud: + nacos: + discovery: + server-addr: 192.168.30.146:8848 + group: PROD_GROUP + enabled: true + +logging: + level: + org: + flowable: INFO + +mybatis-plus: + mapper-locations: classpath*:/mapper/**/*.xml + type-aliases-package: com.sdm.flowable.model.entity + configuration: + map-underscore-to-camel-case: true + global-config: + db-config: + id-type: auto +security: + whitelist: + paths: + - /process/testHpc + - /process/asyncCallback + + + +spring: + profiles: + active: local + + +