Merge remote-tracking branch 'origin/main'

2025-12-04 10:00:49 +08:00
25 changed files with 523 additions and 107 deletions

View File

@@ -0,0 +1,18 @@
package com.sdm.common.entity.req.pbs;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
@Data
public class HpcTaskFileDownReq {
@Schema(description = "任务ID")
public String jobId;
@Schema(description = "文件名称")
public String fileName;
@Schema(description = "文件大小")
public Long fileSize;
}

View File

@@ -1,17 +1,15 @@
package com.sdm.common.entity.req.pbs;
import com.baomidou.mybatisplus.annotation.FieldStrategy;
import com.baomidou.mybatisplus.annotation.TableField;
import com.sdm.common.entity.flowable.executeConfig.BaseExecuteConfig;
import com.alibaba.fastjson2.annotation.JSONField;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import org.springframework.web.multipart.MultipartFile;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Data
public class SubmitHpcTaskRemoteReq extends BaseExecuteConfig {
public class SubmitHpcTaskRemoteReq {
@Schema(description = "配置时的mm时间戳")
public String timesmap;
@@ -31,11 +29,13 @@ public class SubmitHpcTaskRemoteReq extends BaseExecuteConfig {
@Schema(description = "计算任务是否独立存在 0非独立任务 1独立任务")
public int independence;
@Schema(description = "求解文件featchFileType =beforeNode 时传递")
public List<String> inputFiles = new ArrayList<>();
@Schema(description = "求解文件")
@JSONField(serialize = false)
public List<MultipartFile> inputFiles = new ArrayList<>();
@Schema(description = "计算主文件")
public String masterFile;
@JSONField(serialize = false)
public MultipartFile masterFile;
@Schema(description = "计算任务所属任务ID")
public String taskId;
@@ -52,16 +52,19 @@ public class SubmitHpcTaskRemoteReq extends BaseExecuteConfig {
@Schema(description = "执行的命令")
public String command;
@Schema(description = "命令执行输出文件名xx.out")
public String stdout;
@Schema(description = "任务所属项目")
public String projectname;
@Schema(description = "获取文件的方式上一节点beforeNode,hpc节点文件提前上传工作目录hpcNode")
public String featchFileType;
// @Schema(description = "获取文件的方式上一节点beforeNode,hpc节点文件提前上传工作目录hpcNode")
// public String featchFileType;
//
// @Schema(description = "上一节点Id,featchFileType:beforeNode时传递 ")
// public String beforeNodeId;
@Schema(description = "上一节点Id,featchFileType:beforeNode时传递 ")
public String beforeNodeId;
@Schema(description= "自定义占位符,只有列表展示使用key 就是占位符")
private Map<String,SimulationCommandPlaceholderReq> commandExpand;
// @Schema(description= "自定义占位符,只有列表展示使用key 就是占位符")
// private Map<String,SimulationCommandPlaceholderReq> commandExpand;
}

View File

@@ -4,8 +4,8 @@ import com.sdm.common.common.SdmResponse;
import com.sdm.common.config.LongTimeRespFeignConfig;
import com.sdm.common.entity.req.pbs.SubmitHpcTaskRemoteReq;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@FeignClient(
@@ -15,7 +15,7 @@ import org.springframework.web.bind.annotation.RequestBody;
public interface ITaskFeignClient {
// "作业提交"
@PostMapping("/pbs/submitHpcJob")
SdmResponse<String> submitHpcJob(@RequestBody SubmitHpcTaskRemoteReq req);
@PostMapping(value = "/pbs/submitHpcJob", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
SdmResponse<String> submitHpcJob( SubmitHpcTaskRemoteReq req);
}

View File

@@ -9,12 +9,16 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;
@@ -44,6 +48,9 @@ public class HpcCommandExcuteUtil {
@Value("${hpc.remoteDownLoadFileUrl:}")
private String remoteDownLoadFileUrl;
@Value("${hpc.remoteUploadFileUrl:}")
private String remoteUploadFileUrl;
@Value("${hpc.callHpcUpload:}")
private String callHpcUpload;
@@ -137,29 +144,31 @@ public class HpcCommandExcuteUtil {
}
public ResponseEntity<StreamingResponseBody> hpcDownloadFile(String path, Long fileSize) {
// Extract the file name from the path
String fileName = extractFileName(path);
String encodedFileName = URLEncoder.encode(fileName, StandardCharsets.UTF_8);
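// Stream the remote file to the client without buffering the whole body in memory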
StreamingResponseBody body = outputStream -> {
// Build the full URL and safely encode the path
String url = remoteDownLoadFileUrl + "?path=" + URLEncoder.encode(path, StandardCharsets.UTF_8);
// Call service B and stream the body out
DataBufferUtils.write(
webClient.get()
.uri(remoteDownLoadFileUrl, path)
.uri(url)
.retrieve()
.bodyToFlux(DataBuffer.class),
Channels.newChannel(outputStream)
).blockLast(); // block until the write completes
).blockLast();
};
// Build the ResponseEntity
ResponseEntity.BodyBuilder builder = ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION,
"attachment; filename*=UTF-8''" + encodedFileName)
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename*=UTF-8''" + encodedFileName)
.contentType(MediaType.APPLICATION_OCTET_STREAM);
// Only set Content-Length when fileSize is valid
if (fileSize != null && fileSize > 0) {
builder.contentLength(fileSize);
}
return builder.body(body);
}
@@ -195,6 +204,31 @@ public class HpcCommandExcuteUtil {
return lastSlash >= 0 ? path.substring(lastSlash + 1) : path;
}
// Upload a file to the HPC side via the remote tool service
public String uploaHpcFile(MultipartFile file, String subDir) {
try {
// Build the multipart body
MultipartBodyBuilder builder = new MultipartBodyBuilder();
builder.part("file", new ByteArrayResource(file.getBytes()) {
@Override
public String getFilename() {
return file.getOriginalFilename();
}
});
builder.part("subDir", subDir);
// Call the upload endpoint of service B
String uploadResult = webClient.post()
.uri(remoteUploadFileUrl)
.contentType(MediaType.MULTIPART_FORM_DATA)
.body(BodyInserters.fromMultipartData(builder.build()))
.retrieve()
.bodyToMono(String.class)
.block();
return uploadResult;
} catch (Exception e) {
log.error("Failed to upload file to HPC, fileName:{}", file.getOriginalFilename(), e);
return "";
}
}
}

View File

@@ -447,7 +447,7 @@ public class DataFileController implements IDataFeignClient {
*
* @param fileId
*/
@PostMapping("/downloadFileForEdit")
@GetMapping("/downloadFileForEdit")
@Operation(summary = "根据fileId下载文件到指定目录并返回该文件的系统路径", description = "根据fileId下载文件到指定目录并返回该文件的系统路径")
public SdmResponse downloadFileForEdit(@RequestParam(value = "fileId") @Validated Long fileId) {
return IDataFileService.downloadFileForEdit(fileId);

View File

@@ -15,6 +15,7 @@ import java.util.List;
* @since 2025-09-05
*/
public interface FileMetadataInfoMapper extends BaseMapper<FileMetadataInfo> {
List<FileMetadataInfo> listSimulationNodeFiles(@Param("parentId") Long parentId,@Param("fileIds") List<Long> fileIds);
List<FileMetadataInfo> listSimulationNodeDir(@Param("dirIds") List<Long> dirIds,@Param("filterEmptyData") boolean filterEmptyData);
List<FileMetadataInfo> listSimulationNodeFiles(@Param("parentId") Long parentId,@Param("fileIds") List<Long> fileIds, @Param("filterEmptyData") boolean isFilterEmptyData);
}

View File

@@ -12,11 +12,17 @@ public class ListSimulationNodeTreeReq extends BaseReq {
*/
@Schema(description = "数据展示维度模版ID")
@NotEmpty(message = "数据展示维度模版ID不能为空")
private Long dimensionTemplateId;
Long dimensionTemplateId;
/**
* Folder ID
*/
@Schema(description = "文件夹ID")
Long fileId;
/**
* Whether to filter out empty data
*/
@Schema(description = "是否过滤空数据")
boolean filterEmptyData = false;
}

View File

@@ -1,8 +1,6 @@
package com.sdm.data.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.sdm.data.model.dto.NodeSizeDTO;
import com.sdm.data.model.dto.UserTotalFileSizeDTO;
import com.sdm.data.model.entity.FileMetadataInfo;
import java.util.List;
@@ -16,5 +14,10 @@ import java.util.List;
* @since 2025-09-05
*/
public interface IFileMetadataInfoService extends IService<FileMetadataInfo> {
List<FileMetadataInfo> listSimulationNodeFiles(Long parentId,List<Long> fileIds);
/**
* Data overview: query folders, optionally filtering out empty ones
* @return the matching folder metadata records
*/
List<FileMetadataInfo> listSimulationNodeDir(List<Long> dirIds,boolean filterEmptyData);
List<FileMetadataInfo> listSimulationNodeFiles(Long parentId, List<Long> fileIds, boolean isFilterEmptyData);
}

View File

@@ -216,10 +216,10 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
// Get the node type at position index + 1 of dimensionNodeTyepOrderList
String nextNodeType = dimensionNodeTyepOrderList.get(index + 1);
SdmResponse<List<AllNodeByProjectIdAndTypeResp>> allNodeByProjectIdAndType = simuluationNodeFeignClient.getAllNodeByProjectIdAndType(chooseUuid, nextNodeType);
if (!allNodeByProjectIdAndType.isSuccess()) {
return SdmResponse.success();
if (allNodeByProjectIdAndType.isSuccess() && ObjectUtils.isNotEmpty(allNodeByProjectIdAndType.getData())) {
uuids.addAll(allNodeByProjectIdAndType.getData().stream().map(AllNodeByProjectIdAndTypeResp::getUuid).toList());
}
uuids.addAll(allNodeByProjectIdAndType.getData().stream().map(AllNodeByProjectIdAndTypeResp::getUuid).toList());
}
}
@@ -241,7 +241,7 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
return SdmResponse.success(resultDir);
}
List<FileMetadataInfo> nodeDirInfos = fileMetadataInfoService.lambdaQuery().in(FileMetadataInfo::getRelatedResourceUuid, uuids).list();
List<FileMetadataInfo> nodeDirInfos = fileMetadataInfoService.lambdaQuery().in(FileMetadataInfo::getRelatedResourceUuid, uuids).orderByDesc(FileMetadataInfo::getCreateTime).list();
resultDir.addAll(nodeDirInfos);
return SdmResponse.success(resultDir);
}
@@ -269,7 +269,7 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
}
PageHelper.startPage(req.getCurrent(), req.getSize());
List<FileMetadataInfo> fileMetadataInfos = fileMetadataInfoService.lambdaQuery().in(FileMetadataInfo::getId, dirInfos).list();
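// PageHelper paginates the next MyBatis query; empty folders are filtered out in SQL when filterEmptyData is set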
List<FileMetadataInfo> fileMetadataInfos = fileMetadataInfoService.listSimulationNodeDir(dirInfos, req.isFilterEmptyData());
PageInfo<FileMetadataInfo> page = new PageInfo<>(fileMetadataInfos);
return PageUtils.getJsonObjectSdmResponse(fileMetadataInfos, page);
@@ -293,7 +293,7 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
PageHelper.startPage(req.getCurrent(), req.getSize());
List<FileMetadataInfo> fileMetadataInfos = fileMetadataInfoService.listSimulationNodeFiles(parentDirId, dirInfos);
List<FileMetadataInfo> fileMetadataInfos = fileMetadataInfoService.listSimulationNodeFiles(parentDirId, dirInfos,req.isFilterEmptyData());
PageInfo<FileMetadataInfo> page = new PageInfo<>(fileMetadataInfos);
return PageUtils.getJsonObjectSdmResponse(fileMetadataInfos, page);

View File

@@ -2,11 +2,8 @@ package com.sdm.data.service.impl;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.sdm.data.dao.FileMetadataInfoMapper;
import com.sdm.data.model.dto.NodeSizeDTO;
import com.sdm.data.model.dto.UserTotalFileSizeDTO;
import com.sdm.data.model.entity.FileMetadataInfo;
import com.sdm.data.service.IFileMetadataInfoService;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Service;
import java.util.List;
@@ -22,7 +19,12 @@ import java.util.List;
@Service
public class FileMetadataInfoServiceImpl extends ServiceImpl<FileMetadataInfoMapper, FileMetadataInfo> implements IFileMetadataInfoService {
@Override
public List<FileMetadataInfo> listSimulationNodeFiles(Long parentId, List<Long> fileIds) {
return this.baseMapper.listSimulationNodeFiles(parentId,fileIds);
public List<FileMetadataInfo> listSimulationNodeDir(List<Long> dirIds,boolean filterEmptyData) {
return this.baseMapper.listSimulationNodeDir(dirIds, filterEmptyData);
}
@Override
public List<FileMetadataInfo> listSimulationNodeFiles(Long parentId, List<Long> fileIds, boolean isFilterEmptyData) {
return this.baseMapper.listSimulationNodeFiles(parentId,fileIds,isFilterEmptyData);
}
}

View File

@@ -152,9 +152,9 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
// return type;
// }
private static final String TEMP_FILE_PATH = "D:\\nginx-1.28.0\\html\\static\\";
private static final String TEMP_FILE_PATH = "/usr/local/nginx/html/storage/";
private static final String TEMP_NG_URL = "http://192.168.65.199:10031/static/";
private static final String TEMP_NG_URL = "http://192.168.65.161:10031/storage/";
@Override
@Transactional(rollbackFor = Exception.class)
@@ -2418,6 +2418,8 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
@Override
public ResponseEntity<Object> onlyOfficeCallback(CallbackData callbackData) {
log.info("调用回调接口,url{}",callbackData.getUrl());
log.info("调用回调接口key{}",callbackData.getKey());
log.info("调用回调接口status{}",callbackData.getStatus());
// Status listener
// See https://api.onlyoffice.com/editors/callback
Integer status = callbackData.getStatus();
@@ -2451,13 +2453,13 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
}
case 6: {
// document is being edited, but the current document state is saved (edit save)
String prefixUrl = "D:\\software\\docker_file\\onlyoffice-de\\lib\\documentserver\\App_Data\\cache\\files\\data";
String prefixUrl = "/home/onlyoffice/lib/documentserver/App_Data/cache/files/data";
String url = callbackData.getUrl();
url = url.substring(url.indexOf("data") + 5,url.indexOf("output.docx") - 1);
log.info("动态路径为:{}",url);
url = prefixUrl + File.separator + url + File.separator + "output.docx";
log.info("最终路径为:{}",url);
String fileId = callbackData.getKey();
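// The callback key carries a suffix after "_"; keep only the leading fileId part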
String fileId = callbackData.getKey().split("_")[0];
try {
Path path = Paths.get(url);
uploadNewFile(fileId,path);
@@ -2529,6 +2531,8 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
outputStream.close();
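// Build the response for the online editor: temporary nginx URL, file name and file type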
JSONObject jsonObject = new JSONObject();
jsonObject.put("url",TEMP_NG_URL + randomId + "/" + fileMetadataInfo.getOriginalName());
jsonObject.put("fileName",fileMetadataInfo.getOriginalName());
jsonObject.put("fileType",fileMetadataInfo.getOriginalName().substring(fileMetadataInfo.getOriginalName().lastIndexOf(".") + 1));
return SdmResponse.success(jsonObject);
} catch (Exception e) {
log.error("onlyoffice编辑文件失败", e);

View File

@@ -1,9 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.sdm.data.dao.FileMetadataInfoMapper">
<select id="listSimulationNodeDir" resultType="com.sdm.data.model.entity.FileMetadataInfo">
SELECT distinct file_metadata_info.*
FROM file_metadata_info
<if test="filterEmptyData != null and filterEmptyData">
LEFT JOIN file_storage ON file_metadata_info.id = file_storage.dirId
</if>
WHERE file_metadata_info.id IN (
<foreach collection="dirIds" item="dirId" separator=",">
#{dirId}
</foreach>
)
<if test="filterEmptyData != null and filterEmptyData">
AND file_storage.fileId IS NOT NULL
</if>
</select>
<select id="listSimulationNodeFiles" resultType="com.sdm.data.model.entity.FileMetadataInfo">
<!-- First subquery: files (always executed) -->
SELECT *
SELECT distinct file_metadata_info.*
FROM file_metadata_info
WHERE parentId = #{parentId}
AND dataType = 2
@@ -13,13 +31,20 @@
<if test="fileIds != null">
UNION ALL
(
SELECT *
SELECT file_metadata_info.*
FROM file_metadata_info
WHERE id IN (
<if test="filterEmptyData != null and filterEmptyData">
LEFT JOIN file_storage ON file_metadata_info.id = file_storage.dirId
</if>
WHERE file_metadata_info.id IN (
<foreach collection="fileIds" item="fileId" separator=",">
#{fileId}
</foreach>
)
<if test="filterEmptyData != null and filterEmptyData">
AND file_storage.fileId IS NOT NULL
</if>
)
</if>

View File

@@ -1,9 +1,14 @@
package com.sdm.flowable.delegate.handler;
import com.alibaba.fastjson2.JSONObject;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.entity.flowable.executeConfig.HPCExecuteConfig;
import com.sdm.common.entity.req.pbs.SimulationCommandPlaceholderReq;
import com.sdm.common.entity.req.pbs.SubmitHpcTaskRemoteReq;
import com.sdm.common.feign.inter.pbs.ITaskFeignClient;
import com.sdm.common.log.CoreLogger;
import com.sdm.flowable.service.IAsyncTaskRecordService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
@@ -12,14 +17,14 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
// HPC(executeType=HPC)
@Slf4j
@Component("HPC")
public class HpcHandler implements ExecutionHandler<SubmitHpcTaskRemoteReq,HPCExecuteConfig> {
public class HpcHandler implements ExecutionHandler<Map<String, Object>,HPCExecuteConfig> {
@Autowired
private IAsyncTaskRecordService asyncTaskRecordService;
@@ -27,14 +32,19 @@ public class HpcHandler implements ExecutionHandler<SubmitHpcTaskRemoteReq,HPCEx
@Autowired
private ITaskFeignClient taskFeignClient;
/*
* params: business parameters
* config: framework properties
*/
@Override
public void execute(DelegateExecution execution, SubmitHpcTaskRemoteReq params, HPCExecuteConfig config) {
params.setBeforeNodeId(config.getBeforeNodeId());
public void execute(DelegateExecution execution, Map<String, Object> params, HPCExecuteConfig config) {
SubmitHpcTaskRemoteReq submitHpcTaskRemoteReq = convertParamsToReq(params);
// submitHpcTaskRemoteReq.setBeforeNodeId(config.getBeforeNodeId());
// HPC handling logic...
// Status values: INIT / RUNNING / SUCCESS / FAIL
String status = "INIT";
// 1. Submit the job to the HPC platform
SdmResponse<String> submitResp = taskFeignClient.submitHpcJob(params);
SdmResponse<String> submitResp = taskFeignClient.submitHpcJob(submitHpcTaskRemoteReq);
if(!submitResp.isSuccess()|| StringUtils.isBlank(submitResp.getData())){
log.error("HpcHandler submit failed,jobName:{}",params);
status = "FAIL";
@@ -53,17 +63,107 @@ public class HpcHandler implements ExecutionHandler<SubmitHpcTaskRemoteReq,HPCEx
log.info("HPC 任务 {} 已提交", hpcTaskId);
}
/**
* Utility method that converts the parameter map into a SubmitHpcTaskRemoteReq object
*/
private SubmitHpcTaskRemoteReq convertParamsToReq(Map<String, Object> params) {
SubmitHpcTaskRemoteReq req = new SubmitHpcTaskRemoteReq();
if (params == null) {
return req;
}
ObjectMapper objectMapper = new ObjectMapper(); // ideally reuse a configured or injected ObjectMapper
// Map the basic string fields; Objects.toString keeps missing keys as null instead of throwing an NPE
req.setTimesmap(Objects.toString(params.get("timesmap"), null));
req.setJobName(Objects.toString(params.get("jobName"), null));
// Int fields: guard against null and non-numeric values
try {
req.setCoreNum(params.get("coreNum") != null ? Integer.parseInt(params.get("coreNum").toString()) : 0);
} catch (NumberFormatException e) {
CoreLogger.error("coreNum parse error:{},coreNum:{}",e.getMessage(),params.get("coreNum"));
req.setCoreNum(0);
}
req.setSoftware(Objects.toString(params.get("software"), null));
req.setJobType(Objects.toString(params.get("jobType"), null));
try {
req.setIndependence(params.get("independence") != null ? Integer.parseInt(params.get("independence").toString()) : 0);
} catch (NumberFormatException e) {
req.setIndependence(0);
}
req.setTaskId(Objects.toString(params.get("taskId"), null));
req.setTaskName(Objects.toString(params.get("taskName"), null));
req.setRunId(Objects.toString(params.get("runId"), null));
req.setRunName(Objects.toString(params.get("runName"), null));
req.setCommand(Objects.toString(params.get("command"), null));
req.setProjectname(Objects.toString(params.get("projectname"), null));
// req.setFeatchFileType(params.get("featchFileType").toString());
// req.setBeforeNodeId(params.get("beforeNodeId").toString());
// Handle commandExpand: convert the JSON string into a map
String commandExpandJson = Objects.toString(params.get("commandExpand"), null);
if (StringUtils.isNotBlank(commandExpandJson)) {
try {
// Deserialize the JSON string into Map<String, SimulationCommandPlaceholderReq>
Map<String, SimulationCommandPlaceholderReq> commandExpand = objectMapper.readValue(
commandExpandJson,
new TypeReference<Map<String, SimulationCommandPlaceholderReq>>() {}
);
// req.setCommandExpand(commandExpand);
} catch (Exception e) {
CoreLogger.error("convertParamsToReq error:{},params:{}",e.getMessage(), JSONObject.toJSONString(params));
// e.g. fall back to null or an empty map
// req.setCommandExpand(new HashMap<>());
}
}
return req;
}
public String mockinit(){
SubmitHpcTaskRemoteReq mockReq = mockSubmitHpcTaskReq();
SdmResponse<String> submitResp = taskFeignClient.submitHpcJob(mockReq);
if(!submitResp.isSuccess()|| StringUtils.isBlank(submitResp.getData())){
log.error("HpcHandler submit failed,jobName:{}",mockReq.getJobName());
System.out.println("失败");
return "失败";
}
String hpcTaskId = submitResp.getData();
return hpcTaskId;
// SubmitHpcTaskRemoteReq mockReq = mockSubmitHpcTaskReq();
// SdmResponse<String> submitResp = taskFeignClient.submitHpcJob(mockReq);
// if(!submitResp.isSuccess()|| StringUtils.isBlank(submitResp.getData())){
// log.error("HpcHandler submit failed,jobName:{}",mockReq.getJobName());
// System.out.println("失败");
// return "失败";
// }
// String hpcTaskId = submitResp.getData();
Map<String, Object> params = getParams();
HPCExecuteConfig hpcExecuteConfig = new HPCExecuteConfig();
// todo `flowable`.`process_node_param`
hpcExecuteConfig.setBeforeNodeId("uuid-node-8d3e61e7-1374-419c-9e46-210cb88c1113");
execute(null,params,hpcExecuteConfig);
return "ok";
}
private Map<String,Object> getParams() {
Map<String, Object> params = new HashMap<>();
// Basic fields
params.put("timesmap", String.valueOf(System.currentTimeMillis())); // sample timestamp, e.g. 2025-07-29 00:00:00
params.put("jobName", "HPC-数据处理作业-"+ System.currentTimeMillis());
params.put("coreNum", 32);
params.put("software", "reta.exe");
params.put("jobType", "流体动力学仿真");
params.put("independence", 1);
params.put("taskId", "123456");
params.put("taskName", "锂电池热管理系统研发");
params.put("runId", "55555");
params.put("runName", "HPC-电池");
// params.put("command", "\\\\CARSAFE\\share\\solver\\RLithium\\reta.exe -i %retaFile");
params.put("command", "\\\\CARSAFE\\share\\solver\\RLithium\\reta.exe -i .\\model\\aa.xml");
params.put("projectname", "新能源汽车锂电池安全性能优化项目");
params.put("featchFileType", "hpcNode"); // 补充示例值
params.put("beforeNodeId", null); // 示例空值
// commandExpand去掉outName后的JSON字符串
String commandExpandJson = "{\n" +
" \"retaFile\": {\n" +
" \"id\": 1,\n" +
" \"keyEnName\": \"retaFile\",\n" +
" \"keyCnName\": \"电池求解文件\",\n" +
" \"valueType\": \"file_regex_match\",\n" +
" \"inputValue\": \"*.jpg\"\n" +
" }\n" +
"}";
params.put("commandExpand", commandExpandJson);
return params;
}
private SubmitHpcTaskRemoteReq mockSubmitHpcTaskReq() {
@@ -76,8 +176,8 @@ public class HpcHandler implements ExecutionHandler<SubmitHpcTaskRemoteReq,HPCEx
req.software = "reta.exe";
req.jobType = "仿真计算";
req.independence = 1; // standalone task
req.inputFiles = Arrays.asList("input1.dat", "input2.dat", "input3.dat");
req.masterFile = "master.dat";
// req.inputFiles = Arrays.asList("input1.dat", "input2.dat", "input3.dat");
// req.masterFile = "master.dat";
req.taskId = "TASKID_" + timestamp;
req.taskName = "测试任务_" + timestamp;
req.runId = "RUNID_" + timestamp;

View File

@@ -1,3 +1,3 @@
spring:
profiles:
active: local
active: yang

View File

@@ -1,6 +1,7 @@
package com.sdm.pbs.controller;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.entity.req.pbs.HpcTaskFileDownReq;
import com.sdm.common.entity.req.pbs.HpcTaskFileReq;
import com.sdm.common.entity.req.pbs.SubmitHpcTaskRemoteReq;
import com.sdm.common.entity.req.pbs.hpc.*;
@@ -28,6 +29,7 @@ import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
@@ -71,9 +73,9 @@ public class TaskController implements ITaskFeignClient {
return pbsService.queryHpcResource();
}
@PostMapping("/submitHpcJob")
@PostMapping(value = "/submitHpcJob", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@Operation(summary = "作业提交")
public SdmResponse<String> submitHpcJob(@RequestBody SubmitHpcTaskRemoteReq req) {
public SdmResponse<String> submitHpcJob(SubmitHpcTaskRemoteReq req) {
SubmitHpcTaskReq submitHpcTaskReq = new SubmitHpcTaskReq();
BeanUtils.copyProperties(req,submitHpcTaskReq);
return pbsService.submitHpcJob(submitHpcTaskReq);
@@ -97,10 +99,10 @@ public class TaskController implements ITaskFeignClient {
return pbsService.getJobResultFiles(req.getJobId(),req.getTargetDir());
}
@GetMapping("/hpcDownloadFile")
@PostMapping("/hpcDownloadFile")
@Operation(summary = "作业下文件下载")
ResponseEntity<StreamingResponseBody> hpcDownloadFile(@RequestParam String jobId,@RequestParam String fileName,@RequestParam Long fileSize) {
return pbsService.downloadFile(jobId,fileName,fileSize);
ResponseEntity<StreamingResponseBody> hpcDownloadFile(@RequestBody HpcTaskFileDownReq req) {
return pbsService.downloadFile(req.getJobId(),req.getFileName(),req.getFileSize());
}
@PostMapping("/queryJobs")

View File

@@ -1,7 +1,9 @@
package com.sdm.pbs.model.req;
import com.alibaba.fastjson2.annotation.JSONField;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import org.springframework.web.multipart.MultipartFile;
import java.util.ArrayList;
import java.util.List;
@@ -24,10 +26,18 @@ public class SubmitHpcTaskReq {
public boolean independence;
@Schema(description = "求解文件")
public List<String> inputFiles = new ArrayList<>();
@JSONField(serialize = false)
public List<MultipartFile> inputFiles = new ArrayList<>();
@Schema(description = "求解文件路径")
public List<String> inputFilePaths = new ArrayList<>();
@Schema(description = "计算主文件")
public String masterFile;
@JSONField(serialize = false)
public MultipartFile masterFile;
@Schema(description = "主文件上传后的路径")
public String masterFilePath;
@Schema(description = "计算任务所属任务ID")
public String taskId;
@@ -44,6 +54,12 @@ public class SubmitHpcTaskReq {
@Schema(description = "执行的命令")
public String command;
@Schema(description = "命令执行输出文件名xx.out")
public String stdout;
@Schema(description = "工作目录,代码逻辑生成,和求解主文件平级")
public String workDir;
@Schema(description = "任务所属项目")
public String projectname;

View File

@@ -118,7 +118,8 @@ public class HpcInstructionServiceImpl implements HpcInstructionService {
String prefixStr = HpcCommandBuilderUtil.initAddJobPrefixStr(req.getJobId());
AddJobParam addJobParam = new AddJobParam();
BeanUtils.copyProperties(req, addJobParam);
String targetWorkDir = addJobParam.getWorkdir() + "\\" + req.getJobId();
// String targetWorkDir = addJobParam.getWorkdir() + "\\" + req.getJobId();
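// Use the work directory from the request as-is; the jobId sub-directory is no longer appended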
String targetWorkDir = addJobParam.getWorkdir();
Pair<Boolean, String> workDirPair = createDirIfNotExist(targetWorkDir);
if(!workDirPair.getLeft()){
AddJobResp addJobResp=new AddJobResp();

View File

@@ -65,11 +65,8 @@ public class IPbsHpcServiceImpl implements IPbsService {
newJobReq.setProjectname(req.getProjectname());
AddJobReq addJobReq = new AddJobReq();
addJobReq.setName(req.getRunName());
// todo
addJobReq.setStdout("1126.out");
// todo
addJobReq.setWorkdir("\\\\CARSAFE\\share\\spdm");
// todo
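// stdout file name and work directory now come from the request instead of being hard-coded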
addJobReq.setStdout(req.getStdout());
addJobReq.setWorkdir(req.getWorkDir());
addJobReq.setCommand(req.getCommand());
SubmitHpcJobReq submitHpcJobReq = new SubmitHpcJobReq();
mergeSubmitHpcJobReq.setNewJobReq(newJobReq);

View File

@@ -7,6 +7,7 @@ import com.github.pagehelper.PageInfo;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.entity.resp.PageDataResp;
import com.sdm.common.entity.resp.pbs.hpc.FileNodeInfo;
import com.sdm.common.utils.HpcCommandExcuteUtil;
import com.sdm.common.utils.PageUtils;
import com.sdm.pbs.model.bo.HpcJobStatusInfo;
import com.sdm.pbs.model.bo.HpcResouceInfo;
@@ -17,12 +18,14 @@ import com.sdm.pbs.model.req.JobFileCallBackReq;
import com.sdm.pbs.model.req.QueryJobReq;
import com.sdm.pbs.model.req.SubmitHpcTaskReq;
import com.sdm.pbs.service.*;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;
import java.time.LocalDateTime;
@@ -36,6 +39,9 @@ import java.util.stream.Collectors;
@ConditionalOnProperty(name = "pbs.task.impl", havingValue = "hpc")
public class PbsServiceDecorator implements IPbsServiceDecorator {
@Autowired
private HpcCommandExcuteUtil hpcCommandExcuteUtil;
// Regex matching the word after % (\w+ matches letters, digits, underscore)
private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("%(\\w+)");
@@ -67,9 +73,24 @@ public class PbsServiceDecorator implements IPbsServiceDecorator {
@Override
public SdmResponse<String> submitHpcJob(SubmitHpcTaskReq req) {
// SdmResponse<String> response = pbsService.submitHpcJob(req);
// if(response.isSuccess()&&StringUtils.isNotEmpty(response.getData())) {
String jobId = "8848";
// 1. Upload the HPC master file and the other input files
MultipartFile masterFile = req.getMasterFile();
String subDir = req.getJobName()+"\\"+System.currentTimeMillis();
// Upload via webClient; the solver output files and the stdout file are all placed under this directory
String masterFilePath = hpcCommandExcuteUtil.uploaHpcFile(masterFile,subDir);
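// Upload the remaining solver input files to the same sub-directory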
dealInputFiles(req,subDir);
// Output directory of the job
String hpcOutPutDir = extractDirectory(masterFilePath);
req.setWorkDir(hpcOutPutDir);
// Pre-processing: substitute the solver file path into the command
String formatCommand = String.format(req.getCommand(), masterFilePath);
req.setCommand(formatCommand);
req.setMasterFilePath(masterFilePath);
SdmResponse<String> response = pbsService.submitHpcJob(req);
String jobId="";
if(response.isSuccess()&&StringUtils.isNotEmpty(response.getData())) {
jobId = response.getData();
}
if(StringUtils.isNotEmpty(jobId)) {
// Persist the job record
SimulationJob simulationJob = new SimulationJob();
@@ -80,13 +101,15 @@ public class PbsServiceDecorator implements IPbsServiceDecorator {
simulationJob.setSoftware(req.getSoftware());
simulationJob.setJobType(req.getJobType());
simulationJob.setIndependence(req.isIndependence());
simulationJob.setInputFiles(JSONObject.toJSONString(req.getInputFiles()));
simulationJob.setMasterFile(req.getMasterFile());
// simulationJob.setInputFiles(JSONObject.toJSONString(req.getInputFiles()));
// Master file location (todo)
simulationJob.setMasterFile(req.getMasterFilePath());
// Solver input file paths
simulationJob.setInputFiles(JSONObject.toJSONString(req.getInputFilePaths()));
simulationJob.setTaskId(req.getTaskId());
simulationJob.setTaskName(req.getTaskName());
simulationJob.setRunId(req.getRunId());
simulationJob.setRunName(req.getRunName());
// Software and file association
simulationJob.setSoftwareId(req.getSoftwareId());
// Fields below are still to be finalized (todo)
@@ -94,11 +117,9 @@ public class PbsServiceDecorator implements IPbsServiceDecorator {
simulationJob.setJobId(jobId);
// probably unnecessary
simulationJob.setJobDetailId("todo");
// File path (todo): shared dir + jobName (for file return) + uuid; there may be multiple files under it
simulationJob.setStdoutHpcFilePath("/hpc/shared/job001/uuid-123");
simulationJob.setStdoutHpcFilePath(hpcOutPutDir);
simulationJob.setStdoutSpdmFilePath("/minio/base/job001/uuid-123");
// todo: execution info, updated when the scheduled job reports back
simulationJob.setNodeName("todo");
simulationJob.setExecutCommand("ansys -b -input input.dat -output output.log");
@@ -108,11 +129,10 @@ public class PbsServiceDecorator implements IPbsServiceDecorator {
simulationJob.setJobStatus("Configuring");
// ? todo probably unnecessary
simulationJob.setSolverName("LS-DYNA");
// todo: execution info, updated when the scheduled job reports back
simulationJob.setTotalKernelTime(3600000L);
simulationJob.setTotalUserTime(7200000L);
simulationJob.setTotalElapsedTime(9000L);
simulationJob.setTotalKernelTime(null);
simulationJob.setTotalUserTime(null);
simulationJob.setTotalElapsedTime(null);
// Identifiers and status
simulationJob.setUuid("f81d4fae7dec11d0a76500a0c91e6bf6");
@@ -129,6 +149,41 @@ public class PbsServiceDecorator implements IPbsServiceDecorator {
return SdmResponse.success(jobId);
}
private void dealInputFiles(SubmitHpcTaskReq req, String subDir) {
if(req.getInputFiles()==null|| CollectionUtils.isEmpty(req.getInputFiles())) {
return;
}
List<MultipartFile> inputFiles = req.getInputFiles();
List<String> list = new ArrayList<>();
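// Upload each input file to the HPC sub-directory and collect the returned remote paths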
for (MultipartFile inputFile : inputFiles) {
String inputFilePath = hpcCommandExcuteUtil.uploaHpcFile(inputFile,subDir);
list.add(inputFilePath);
}
req.setInputFilePaths(list);
}
/**
* Extract the directory part of a file path (including the trailing path separator)
* @param fullPath the full file path
* @return the directory path (including the trailing backslash); returns the original path if it is empty or has no separator
*/
private String extractDirectory(String fullPath) {
// Validate the argument
if (fullPath == null || fullPath.isEmpty()) {
return fullPath;
}
// Find the position of the last backslash
int lastSeparatorIndex = fullPath.lastIndexOf("\\");
// If no separator is found, return the original path; otherwise keep everything up to and including the last separator
if (lastSeparatorIndex == -1) {
return fullPath;
}
return fullPath.substring(0, lastSeparatorIndex + 1);
}
@Override
public SdmResponse<Boolean> stopHpcJob(String jobId) {
return pbsService.stopHpcJob(jobId);
@@ -142,9 +197,22 @@ public class PbsServiceDecorator implements IPbsServiceDecorator {
@Override
public SdmResponse<List<FileNodeInfo>> getJobResultFiles(String jobId,String targetDir) {
// todo: derive the work directory from jobId — shared dir + jobName (file return) + uuid; may contain multiple files
String workDir = StringUtils.isNotBlank(targetDir) ? targetDir :"D:\\需求";
SdmResponse<List<FileNodeInfo>> nodeInfos = pbsService.getJobResultFiles("", workDir);
// Look up the job record by jobId
SimulationJob simulationJob = simulationJobService.lambdaQuery()
.eq(SimulationJob::getJobId, jobId)
.one();
// Choose the query path
String queryPath;
if (targetDir != null && !targetDir.isEmpty()) {
queryPath = targetDir;
} else if (simulationJob!=null&&simulationJob.getStdoutHpcFilePath() != null && !simulationJob.getStdoutHpcFilePath().isEmpty()) {
queryPath = simulationJob.getStdoutHpcFilePath();
} else {
return SdmResponse.failed("查询路径为空,无法获取文件");
}
// Call the PBS service to fetch the file list
SdmResponse<List<FileNodeInfo>> nodeInfos = pbsService.getJobResultFiles("", queryPath);
return nodeInfos;
}

View File

@@ -108,6 +108,8 @@ hpc:
remoteCreateDirUrl: http://192.168.65.55:9097/createDir
remoteScanDirUrl: http://192.168.65.55:9097/scanDir
remoteDownLoadFileUrl: http://192.168.65.55:9097/hpcDownload
# remoteDownLoadFileUrl: http://127.0.0.1:9097/hpcDownload
remoteUploadFileUrl: http://192.168.65.55:9097/uploadHpcFile
callHpcUpload: http://192.168.65.55:9097/addJobQueue

View File

@@ -0,0 +1,25 @@
package com.sdm.project.model.req;
import lombok.Data;
import java.io.Serializable;
@Data
public class PerformanceInfoReq implements Serializable {
private static final long serialVersionUID = 1L;
private String value;
private String englishName;
private String highValue;
private String performanceName;
private String method;
private String unit;
}

View File

@@ -0,0 +1,47 @@
package com.sdm.project.model.req;
import jakarta.validation.constraints.NotNull;
import lombok.Data;
import java.io.Serializable;
@Data
public class ProjecInfoReq implements Serializable {
private static final long serialVersionUID = 1L;
/**
* Department
*/
private String department;
private String applicants;
private String date;
private String projectNum;
private String workspaceNum;
private String workspace;
private String taskType;
private String reportVer;
private String fileNum;
private String formulateTime;
private String checkTime;
private String approveTime;
private Boolean isBatch = false;
private String loadcaseName;
private String reportCommand;
}

View File

@@ -25,8 +25,8 @@ public class SpdmReportReq {
private List<SimulationPerformance> performanceList;
/**
* Collection of parent node info for the case
* Parent node info for the case
*/
private List<TaskNode> parentNodeInfoList;
private ProjecInfoReq projecInfoReq;
}

View File

@@ -14,6 +14,7 @@ import com.sdm.common.entity.constants.TagConstant;
import com.sdm.common.entity.enums.DirTypeEnum;
import com.sdm.common.entity.enums.NodeTypeEnum;
import com.sdm.common.entity.req.data.CreateDirReq;
import com.sdm.common.entity.req.data.DelDirReq;
import com.sdm.common.entity.req.data.RenameDirReq;
import com.sdm.common.entity.req.data.UploadFilesReq;
import com.sdm.common.entity.req.project.*;
@@ -199,10 +200,26 @@ public class NodeServiceImpl extends ServiceImpl<SimulationNodeMapper, Simulatio
TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
return SdmResponse.failed("操作节点失败,原因:删除节点时失败!");
}
deleteDirNew(deleteNodeIdList);
}
return SdmResponse.success();
}
// Delete folders
public boolean deleteDirNew(List<String> uuidList) {
for (String uuid : uuidList) {
DelDirReq req = new DelDirReq();
req.setDelUuid(uuid);
log.info("调用删除文件夹的参数为:{}", req);
SdmResponse response = dataClientFeignClient.delDir(req);
log.info("调用删除文件夹的返回值为:{}", response);
if (response.getCode() != ResultCode.SUCCESS.getCode()) {
return false;
}
}
return true;
}
private boolean deleteNode(List<String> deleteNodeIdList) {
List<ProjectNodePo> projectNodePoList = nodeMapper.allList(deleteNodeIdList);
@@ -235,7 +252,11 @@ public class NodeServiceImpl extends ServiceImpl<SimulationNodeMapper, Simulatio
List<Long> performanceIdList = performanceNodePoList.stream().map(PerformanceNodePo::getId).toList();
nodeMapper.deletePerformanceBatch(performanceIdList);
nodeMapper.deletePerformanceExtraBatch(performanceIdList);
return true;
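// Recursively delete the children of the removed nodes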
List<SpdmNodeVo> childrenNodeList = nodeMapper.getNodeListByNodeIdList(deleteNodeIdList);
if (CollectionUtils.isEmpty(childrenNodeList)) {
return true;
}
return deleteNode(childrenNodeList.stream().map(SpdmNodeVo::getUuid).toList());
}
@Override
@@ -764,6 +785,7 @@ public class NodeServiceImpl extends ServiceImpl<SimulationNodeMapper, Simulatio
List<SimulationNode> nodeList = this.lambdaQuery()
.eq(ObjectUtils.isNotEmpty(nodeId), SimulationNode::getId, nodeId)
.eq(ObjectUtils.isNotEmpty(nodeType), SimulationNode::getNodeType, nodeType)
.orderByDesc(SimulationNode::getCreateTime)
.list();
if (CollectionUtils.isEmpty(nodeList)) {
return SdmResponse.failed("未找到节点");
@@ -816,7 +838,7 @@ public class NodeServiceImpl extends ServiceImpl<SimulationNodeMapper, Simulatio
.list();
if (CollectionUtils.isEmpty(simulationNodeList)) {
return SdmResponse.failed("未找到节点");
return SdmResponse.success();
}
List<AllNodeByProjectIdAndTypeResp> allNodeByProjectIdAndTypeRespList = new ArrayList<>();
simulationNodeList.forEach(simulationNode -> {

View File

@@ -79,6 +79,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
@@ -135,6 +136,8 @@ public class SimulationRunServiceImpl extends ServiceImpl<SimulationRunMapper, S
private static final String TEMP_REPORT_PATH = "/opt/report/";
private static final String TEMPLATE_PATH = " /opt/script/template staticsAnalyse";
// Directory for temporary file storage
private final String tempFileDir = System.getProperty("user.dir") + "/csv_uploads/";
@@ -1096,12 +1099,54 @@ public class SimulationRunServiceImpl extends ServiceImpl<SimulationRunMapper, S
if (CollectionUtils.isNotEmpty(imageFileIdList)) {
String randomId = RandomUtil.generateString(16);
log.info("临时路径为:{}" , randomId);
String commands = "python /opt/script/exportWord.py " + TEMP_REPORT_PATH + randomId + File.separator + TEMPLATE_PATH;
for (Long fileId : imageFileIdList) {
dataFeignClient.downloadFileToLocal(fileId, TEMP_REPORT_PATH + randomId);
}
// Generate projectInfo.json
// Parent node info
ProjecInfoReq projecInfoReq = req.getProjecInfoReq();
if (ObjectUtils.isNotEmpty(projecInfoReq)) {
projecInfoReq.setReportCommand(commands);
String jsonString = JSON.toJSONString(projecInfoReq);
// try-with-resources ensures the stream is closed even if the write fails
try (FileOutputStream projectInfoOutputStream = new FileOutputStream(TEMP_REPORT_PATH + randomId + File.separator + "projectInfo.json")) {
projectInfoOutputStream.write(jsonString.getBytes(StandardCharsets.UTF_8));
projectInfoOutputStream.flush();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Generate performance.json
List<SimulationPerformance> performanceList = req.getPerformanceList();
List<PerformanceInfoReq> exportPerformanceList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(performanceList)) {
for (SimulationPerformance performance : performanceList) {
// Create a new instance per record; reusing a single instance would leave every list entry holding the last values
PerformanceInfoReq performanceInfoReq = new PerformanceInfoReq();
performanceInfoReq.setValue(StringUtils.isNotBlank(performance.getTargetValue()) ? performance.getTargetValue() : "");
performanceInfoReq.setEnglishName(StringUtils.isNotBlank(performance.getEnglishName()) ? performance.getEnglishName() : "");
performanceInfoReq.setHighValue(StringUtils.isNotBlank(performance.getHighValue()) ? performance.getHighValue() : "");
performanceInfoReq.setPerformanceName(StringUtils.isNotBlank(performance.getPerformanceName()) ? performance.getPerformanceName() : "");
performanceInfoReq.setMethod(StringUtils.isNotBlank(performance.getMethod()) ? performance.getMethod() : "");
performanceInfoReq.setUnit(StringUtils.isNotBlank(performance.getUnit()) ? performance.getUnit() : "");
exportPerformanceList.add(performanceInfoReq);
}
String jsonString = JSON.toJSONString(exportPerformanceList);
try (FileOutputStream performanceOutputStream = new FileOutputStream(TEMP_REPORT_PATH + randomId + File.separator + "performance.json")) {
performanceOutputStream.write(jsonString.getBytes(StandardCharsets.UTF_8));
performanceOutputStream.flush();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Invoke the report script
log.info("调用脚本中。。。。。。");
String commands = "python /opt/script/exportWord.py " + TEMP_REPORT_PATH + randomId + File.separator;
log.info("command:" + commands);
List<String> result = new ArrayList<>();
int runningStatus = -1;
@@ -1131,7 +1176,7 @@ public class SimulationRunServiceImpl extends ServiceImpl<SimulationRunMapper, S
}
try {
// Read the report generated by the script from the temp path
FileInputStream fileInputStream = new FileInputStream(TEMP_REPORT_PATH + randomId + File.separator + "report.docx");
FileInputStream fileInputStream = new FileInputStream(TEMP_REPORT_PATH + randomId + File.separator + "report" + File.separator + "report.docx");
byte[] fileData = fileInputStream.readAllBytes();
// 设置响应头
response.reset();
@@ -1151,11 +1196,6 @@ public class SimulationRunServiceImpl extends ServiceImpl<SimulationRunMapper, S
log.info("删除临时路径:{},中。。。。。。",randomId);
deleteFolder(new File(TEMP_REPORT_PATH + randomId));
}
// Performance metric list
List<SimulationPerformance> performanceList = req.getPerformanceList();
// Parent node list
List<TaskNode> parentNodeInfoList = req.getParentNodeInfoList();
}
@Override