This commit is contained in:
2025-11-27 16:20:05 +08:00
7 changed files with 149 additions and 19 deletions

View File

@@ -75,7 +75,6 @@
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
@@ -89,4 +88,4 @@
</plugins>
</build>
</project>
</project>

View File

@@ -1,12 +1,12 @@
-- Node input-parameter table, scoped per process INSTANCE (not per definition).
-- Fix: the previous rendering carried two UNIQUE KEY clauses with no separating
-- comma (invalid MySQL); only the instance-scoped key is kept, matching the
-- service layer which upserts by (processInstanceId, nodeId).
CREATE TABLE `flowable`.`process_node_param`
(
    `id`                  bigint      NOT NULL AUTO_INCREMENT,
    `processDefinitionId` varchar(64) DEFAULT NULL COMMENT '流程定义ID',
    `processInstanceId`   varchar(64) DEFAULT NULL COMMENT '流程实例ID',
    `nodeId`              varchar(64) DEFAULT NULL COMMENT '节点ID',
    `paramJson`           text COMMENT '输入参数JSON',
    `createTime`          datetime    NOT NULL DEFAULT CURRENT_TIMESTAMP,
    `updateTime`          datetime    NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    PRIMARY KEY (`id`),
    -- NOTE(review): the old uk_processDefinitionId_nodeId key was dropped in this
    -- change; confirm no consumer still relies on definition-level uniqueness.
    UNIQUE KEY `uk_processInstanceId_nodeId` (`processInstanceId`,`nodeId`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='流程节点输入参数表';

View File

@@ -22,7 +22,8 @@ import lombok.Data;
@JsonSubTypes.Type(value = LocalAppExecuteConfig.class, name = "localApp"),
@JsonSubTypes.Type(value = HPCExecuteConfig.class, name = "HPC"),
@JsonSubTypes.Type(value = HttpExecuteConfig.class, name = "http"),
@JsonSubTypes.Type(value = DataProcessExecuteConfig.class, name = "dataProcess")
@JsonSubTypes.Type(value = DataProcessExecuteConfig.class, name = "dataProcess"),
@JsonSubTypes.Type(value = ExportWordScriptExecuteConfig.class, name = "exportWordScript")
})
public abstract class BaseExecuteConfig {
// 公共字段executeType子类无需重复定义父类统一维护

View File

@@ -0,0 +1,4 @@
package com.sdm.flowable.config.executeConfig;

/**
 * Execute config for the "exportWordScript" node type (registered as a
 * {@code @JsonSubTypes.Type} on {@code BaseExecuteConfig}).
 *
 * Currently a marker subtype with no fields of its own; the shared
 * {@code executeType} discriminator is maintained on the parent class.
 */
public class ExportWordScriptExecuteConfig extends BaseExecuteConfig {
}

View File

@@ -0,0 +1,132 @@
package com.sdm.flowable.delegate.handler;
import com.sdm.common.entity.req.data.UploadFilesReq;
import com.sdm.common.feign.inter.data.IDataFeignClient;
import com.sdm.common.utils.RandomUtil;
import com.sdm.flowable.config.executeConfig.BaseExecuteConfig;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.flowable.engine.delegate.DelegateExecution;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @Description: 生成自动化报告脚本处理器
* @Author: shiman
* @Date: 2023-03-05 23:09
*/
@Slf4j
@Component("exportWordScript")
public class ExportWordScriptHandler implements ExecutionHandler {
@Autowired
private IDataFeignClient dataFeignClient;
private static final String TEMP_REPORT_PATH = "/opt/report/";
// todo 用户需要上传脚本文件到当前算列的导出报告节点输入文件夹下,params需要记下脚本文件id此处暂时写死
// 输入参数: 生成结果报告文件名
// 用户得会写脚本文件脚本exportWord.py或者使用我们指定的生成报告脚本
// 文件imageFileIdList: 用户选择指定图片文件id列表用于生成报告时插入到报告中
@Override
public void execute(DelegateExecution execution, Map<String, Object> params, BaseExecuteConfig config) {
List<Long> imageFileIdList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(imageFileIdList)) {
String randomId = RandomUtil.generateString(16);
log.info("临时路径为:{}", randomId);
for (Long fileId : imageFileIdList) {
dataFeignClient.downloadFileToLocal(fileId, TEMP_REPORT_PATH + randomId);
}
// 调用脚本
log.info("调用脚本中。。。。。。");
String commands = "python /opt/script/exportWord.py " + TEMP_REPORT_PATH + randomId + File.separator;
log.info("command:" + commands);
List<String> result = new ArrayList<>();
int runningStatus = -1;
try {
log.info("开始同步执行脚本");
Process process = Runtime.getRuntime().exec(commands);
log.info("准备获取脚本输出");
log.info("开始获取脚本输出");
BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
String line;
while ((line = reader.readLine()) != null) {
log.info("executePython" + line);
result.add(line);
}
log.info("脚本执行完成");
runningStatus = process.waitFor();
log.info("脚本运行状态:" + runningStatus);
} catch (IOException | InterruptedException e) {
log.error("执行脚本失败:" + e);
return;
}
if (runningStatus != 0) {
log.error("执行脚本失败");
return;
} else {
log.info(commands + "执行脚本完成!");
}
try {
// 获取临时路径中脚本生成的报告
uploadResultFileToMinio(TEMP_REPORT_PATH + randomId + File.separator + "report.docx");
} catch (Exception ex) {
log.error("生成自动化报告失败:{}", ex.getMessage(), ex);
throw new RuntimeException("生成自动化报告失败");
}
// 删除临时路径
log.info("删除临时路径:{},中。。。。。。", randomId);
deleteFolder(new File(TEMP_REPORT_PATH + randomId));
}
}
private void uploadResultFileToMinio(String resultFilePath) {
try {
File resultFile = new File(resultFilePath);
if (!resultFile.exists()) {
log.warn("结果文件不存在: {}", resultFilePath);
return;
}
// 创建一个临时的MultipartFile实现
MultipartFile multipartFile = new MockMultipartFile(
resultFile.getName(),
resultFile.getName(),
"application/json",
Files.readAllBytes(resultFile.toPath())
);
// 上传到MinIO
UploadFilesReq req = new UploadFilesReq();
req.setDirId(1L);
req.setFile(multipartFile);
// 调用上传文件的方法
// 注意:这里应该处理返回值
dataFeignClient.uploadFiles(req);
log.info("结果文件已上传到MinIO: {}", resultFilePath);
} catch (Exception e) {
log.error("上传结果文件到MinIO失败: {}", resultFilePath, e);
throw new RuntimeException("上传结果文件到MinIO失败: " + e.getMessage(), e);
}
}
public static void deleteFolder(File folder) {
if (folder.isDirectory()) {
File[] files = folder.listFiles();
if (files != null) {
for (File file : files) {
deleteFolder(file);
}
}
}
folder.delete();
}
}

View File

@@ -32,9 +32,9 @@ public class ProcessNodeParam implements Serializable {
@TableId(value = "id", type = IdType.AUTO)
private Long id;
@ApiModelProperty(value = "流程定义ID")
@TableField("processDefinitionId")
private String processDefinitionId;
@ApiModelProperty(value = "流程实例ID")
@TableField("processInstanceId")
private String processInstanceId;
@ApiModelProperty(value = "节点ID")
@TableField("nodeId")

View File

@@ -33,10 +33,10 @@ public class ProcessNodeParamServiceImpl extends ServiceImpl<ProcessNodeParamMap
@Autowired
private RuntimeService runtimeService;
// 保存节点输入参数(按流程定义ID保存,用于参数模板)
public void saveParamByDefinitionId(String processDefinitionId, String nodeId, Map<String, Object> params) {
// 保存节点输入参数(按流程实例保存,用于参数模板)
public void saveParamByDefinitionId(String processInstanceId, String nodeId, Map<String, Object> params) {
ProcessNodeParam param = new ProcessNodeParam();
param.setProcessDefinitionId(processDefinitionId);
param.setProcessInstanceId(processInstanceId);
param.setNodeId(nodeId);
try {
param.setParamJson(objectMapper.writeValueAsString(params));
@@ -44,7 +44,7 @@ public class ProcessNodeParamServiceImpl extends ServiceImpl<ProcessNodeParamMap
throw new RuntimeException("参数序列化失败", e);
}
// 存在则更新,不存在则插入
ProcessNodeParam existing = this.lambdaQuery().eq(ProcessNodeParam::getProcessDefinitionId, processDefinitionId).eq(ProcessNodeParam::getNodeId, nodeId).one();
ProcessNodeParam existing = this.lambdaQuery().eq(ProcessNodeParam::getProcessInstanceId, processInstanceId).eq(ProcessNodeParam::getNodeId, nodeId).one();
if (existing != null) {
param.setId(existing.getId());
this.updateById(param);
@@ -55,13 +55,7 @@ public class ProcessNodeParamServiceImpl extends ServiceImpl<ProcessNodeParamMap
// 查询节点输入参数(流程执行时调用)
public Map<String, Object> getParam(String procInstId, String nodeId) {
// 获取流程实例对应的流程定义ID
String processDefinitionId = runtimeService.createProcessInstanceQuery()
.processInstanceId(procInstId)
.singleResult()
.getProcessDefinitionId();
ProcessNodeParam param = this.lambdaQuery().eq(ProcessNodeParam::getProcessDefinitionId, processDefinitionId)
ProcessNodeParam param = this.lambdaQuery().eq(ProcessNodeParam::getProcessInstanceId, procInstId)
.eq(ProcessNodeParam::getNodeId, nodeId)
.one();