This commit is contained in:
2026-02-05 10:52:11 +08:00
14 changed files with 158 additions and 9 deletions

View File

@@ -0,0 +1,2 @@
-- Migration: add a soft-delete flag to simulation_job.
-- 'N' (default) = active row, 'Y' = logically deleted; queries filter on del_flag = 'N'.
ALTER TABLE `spdm_baseline`.`simulation_job`
ADD COLUMN `del_flag` CHAR(1) NOT NULL DEFAULT 'N' COMMENT '逻辑删除标识N-未删除Y-已删除';

View File

@@ -49,9 +49,25 @@ public enum DirTypeEnum {
* 报告模板库
*/
@Schema(description = "报告模板库文件夹", example = "8")
REPORT_TEMPLATE_DIR("reportTemplate", 8);
REPORT_TEMPLATE_DIR("reportTemplate", 8),
/**
* 机器人库
*/
@Schema(description = "机器人库文件夹", example = "9")
ROBOT_DIR("robot", 9),
/**
* 工业设计库
*/
@Schema(description = "工业设计库文件夹", example = "10")
INDUSTRIAL_DESIGN_DIR("industrialDesign", 10),
/**
* 公差分析库
*/
@Schema(description = "公差分析库文件夹", example = "11")
TOLERANCE_ANALYSIS_DIR("toleranceAnalysis", 11);
String dirName;
int value;

View File

@@ -0,0 +1,10 @@
package com.sdm.common.entity.req.pbs;
import lombok.Data;
import java.util.List;
/**
 * Request payload for the batch (logical) deletion of HPC jobs.
 */
@Data
public class DelHpcJobsReq {
// IDs of the HPC jobs to delete; matched against SimulationJob.jobId
private List<String> hpcJobIds;
}

View File

@@ -360,8 +360,16 @@ public class DimensionTemplateServiceImpl extends ServiceImpl<DimensionTemplateM
}
if(CollectionUtils.isNotEmpty(result)) {
// 根据children中的最大创建时间对result进行倒序排序
// 先按dataType排序文件夹在前dataType=1文件在后dataType=2再按children中的最大创建时间倒序排序
result.sort((dto1, dto2) -> {
// 首先按dataType排序文件夹(1)在前,文件(2)在后
Integer dataType1 = dto1.getDataType();
Integer dataType2 = dto2.getDataType();
if (dataType1 != null && dataType2 != null && !dataType1.equals(dataType2)) {
return dataType1.compareTo(dataType2); // 升序1(文件夹) < 2(文件)
}
// dataType相同或都为null时按创建时间倒序排序
LocalDateTime maxCreateTime1 = dto1.getMergeSameNameChildren().stream()
.map(FileMetadataInfoResp::getCreateTime)
.filter(Objects::nonNull)

View File

@@ -875,6 +875,9 @@ public class MinioFileIDataFileServiceImpl implements IDataFileService {
.eq(FileMetadataInfo::getIsLatest, FileIsLastEnum.YES.getValue())
// 审核完成 ,元数据修改审核中,文件修改审核中,删除文件审核中
.in(FileMetadataInfo::getApproveType,fileDatdList)
// 文件夹在前(dataType=1),文件在后(dataType=2),同类型内按名称升序
.orderByAsc(FileMetadataInfo::getDataType)
.orderByDesc(FileMetadataInfo::getCreateTime)
.list();
// 创建人赋值
setCreatorNames(list);

View File

@@ -62,6 +62,6 @@
</if>
<!-- 排序:无论是否联合,都对最终结果排序 -->
ORDER BY updateTime DESC
ORDER BY dataType ASC, updateTime DESC
</select>
</mapper>

View File

@@ -1,6 +1,7 @@
package com.sdm.pbs.controller;
import com.sdm.common.common.SdmResponse;
import com.sdm.common.entity.req.pbs.DelHpcJobsReq;
import com.sdm.common.entity.req.pbs.HpcTaskFileDownReq;
import com.sdm.common.entity.req.pbs.HpcTaskFileReq;
import com.sdm.common.entity.req.pbs.hpc.*;
@@ -13,8 +14,8 @@ import com.sdm.common.entity.resp.pbs.hpc.nodelist.NodeListResp;
import com.sdm.common.utils.HpcCommandExcuteUtil;
import com.sdm.pbs.model.bo.HpcJobStatusInfo;
import com.sdm.pbs.model.bo.HpcResouceInfo;
import com.sdm.pbs.model.entity.SimulationJob;
import com.sdm.pbs.model.entity.SimulationHpcCommand;
import com.sdm.pbs.model.entity.SimulationJob;
import com.sdm.pbs.model.req.JobFileCallBackReq;
import com.sdm.pbs.model.req.QueryJobReq;
import com.sdm.pbs.model.req.SubmitHpcTaskReq;
@@ -118,6 +119,12 @@ public class TaskController {
return pbsServiceDecorator.querySoftConfig(appUuid);
}
/**
 * Batch-deletes HPC jobs (logical delete).
 * Thin endpoint: validation and the actual delete live in the decorator service.
 *
 * @param req request carrying the list of HPC job ids to delete
 * @return generic response describing success or failure
 */
@PostMapping("/delHpcJobs")
@Operation(summary = "批量删除Hpc任务")
SdmResponse delHpcJobs(@RequestBody DelHpcJobsReq req) {
return pbsServiceDecorator.delHpcJobs(req);
}
/*====================================================================*/

View File

@@ -193,6 +193,10 @@ public class SimulationJob implements Serializable {
@TableField("processInstanceId")
private String processInstanceId;
@Schema(description = "任务是否删除N未删除Y已删除")
@TableField("del_flag")
private String delFlag;
@Schema(description = "任务耗时,前端展示字段")
@TableField(value = "costTime",exist = false)
private String costTime;

View File

@@ -42,6 +42,7 @@ public class HpcJobStatusScheduleExcutor {
SimulationJob::getRunId,
SimulationJob::getJobId,
SimulationJob::getJobStatus)
.eq(SimulationJob::getDelFlag,"N")
.isNotNull(SimulationJob::getJobId)
.notIn(SimulationJob::getJobStatus, "Canceled", "Failed")
// 非上传中的,非回传结束的。JobStatus 结束 通知 uploading 只会有一次,回传失败后,人工改表修复

View File

@@ -55,6 +55,4 @@ public interface IPbsService {
*/
ResponseEntity<StreamingResponseBody> downloadFile(String jobId, String fileName,Long fileSize);
}

View File

@@ -134,7 +134,6 @@ public class IPbsHpcServiceImpl implements IPbsService {
return hpcInstructionService.hpcDownloadFile(fileName,fileSize);
}
private HpcResouceInfo buildHpcResourceInfo(List<NodeList> nodes) {
HpcResouceInfo result = new HpcResouceInfo();

View File

@@ -10,6 +10,7 @@ import com.sdm.common.common.SdmResponse;
import com.sdm.common.common.ThreadLocalContext;
import com.sdm.common.entity.enums.MessageTemplateEnum;
import com.sdm.common.entity.req.flowable.AsyncCallbackRequest;
import com.sdm.common.entity.req.pbs.DelHpcJobsReq;
import com.sdm.common.entity.req.system.SendMsgReq;
import com.sdm.common.entity.resp.PageDataResp;
import com.sdm.common.entity.resp.pbs.hpc.FileNodeInfo;
@@ -73,6 +74,13 @@ public class PbsServiceDecorator implements IPbsServiceDecorator {
@Value("#{'${hpc.jobs.middleOrders:Running}'.split(',')}")
private List<String> middleOrders;
// 运行中
@Value("#{'${hpc.jobs.hpcJobIngStatus:Configuring,Queued,Running}'.split(',')}")
private List<String> hpcJobIngStatus;
@Value("#{'${hpc.jobs.hpcFileIngStatus:generating,uploading}'.split(',')}")
private List<String> hpcFileIngStatus;
// 正则匹配%后的单词(\w+ 匹配字母、数字、下划线)
private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("%(\\w+)");
@@ -562,12 +570,69 @@ public class PbsServiceDecorator implements IPbsServiceDecorator {
return pbsService.downloadFile("", fileName,fileSize);
}
public SdmResponse<PageDataResp<List<SimulationJob>>> queryJobs(QueryJobReq req){
/**
 * Batch logical-delete of HPC jobs.
 *
 * Steps: load the jobs by id, reject the request if any job is still running
 * or still uploading its files, then flip del_flag to 'Y' for the whole batch.
 *
 * @param req carries the jobIds to delete; must be non-empty
 * @return success response, or a failed response when the batch update fails
 * @throws RuntimeException when the id list is empty, no jobs are found,
 *                          or at least one job is still in progress
 */
public SdmResponse delHpcJobs(DelHpcJobsReq req) {
if (CollectionUtils.isEmpty(req.getHpcJobIds())) {
throw new RuntimeException("删除的jobIds不能是空");
}
// 1. 查询待删除的任务列表
List<SimulationJob> jobList = simulationJobService.lambdaQuery()
.in(SimulationJob::getJobId, req.getHpcJobIds())
.list();
if (jobList.isEmpty()) {
throw new RuntimeException("未查询到待删除的HPC任务");
}
// 2. 校验任务状态(非进行中、文件非上传中)
Pair<Boolean, List<SimulationJob>> pairCheck = checkJobStatus(jobList);
// BUG FIX: checkJobStatus returns left == true when at least one job is still
// running/uploading (it fills the right-hand invalid-job list in the same branch).
// The old guard was `if (!pairCheck.getLeft())`, which rejected deletion exactly
// when every job had FINISHED and let in-progress jobs through.
if (pairCheck.getLeft()) {
log.error("checkJobStatus true,{}", JSON.toJSONString(pairCheck.getRight()));
throw new RuntimeException("删除的任务状态和文件状态不能是未完成");
}
// 3. 调用HPC批量删除接口 todo
// boolean hpcDelSuccess = batchDeleteHpcJobs(req.getHpcJobIds());
// if (!hpcDelSuccess) {
//     return SdmResponse.fail("调用HPC批量删除接口失败");
// }
// 4. 逻辑删除simulation_job表数据
boolean logicDelSuccess = logicDeleteSimulationJob(jobList);
if (!logicDelSuccess) {
return SdmResponse.failed("逻辑删除任务数据失败");
}
return SdmResponse.success("批量删除HPC任务成功");
}
/**
 * Marks every job in the batch as logically deleted (del_flag = 'Y')
 * and persists the change with a single batch update.
 *
 * @param jobList jobs to soft-delete; mutated in place
 * @return true when the batch update succeeded
 */
private boolean logicDeleteSimulationJob(List<SimulationJob> jobList) {
jobList.forEach(job -> job.setDelFlag("Y"));
boolean updated = simulationJobService.updateBatchById(jobList);
log.info("logicDeleteSimulationJob,b={}", updated);
return updated;
}
/**
 * Scans the batch for jobs that must NOT be deleted yet: a job whose status is
 * in hpcJobIngStatus (still running) or whose file status is in hpcFileIngStatus
 * (still uploading).
 *
 * @param jobList candidate jobs
 * @return pair whose left is true when at least one in-progress job was found,
 *         and whose right lists exactly those jobs
 */
private Pair<Boolean, List<SimulationJob>> checkJobStatus(List<SimulationJob> jobList) {
List<SimulationJob> invalidJobs = new ArrayList<>();
for (SimulationJob job : jobList) {
boolean jobRunning = hpcJobIngStatus.contains(job.getJobStatus());
boolean fileUploading = hpcFileIngStatus.contains(job.getFileStatus());
if (jobRunning || fileUploading) {
invalidJobs.add(job);
}
}
return Pair.of(!invalidJobs.isEmpty(), invalidJobs);
}
public SdmResponse<PageDataResp<List<SimulationJob>>> queryJobs(QueryJobReq req){
PageHelper.startPage(req.getCurrent(), req.getSize());
// 构建查询条件
LambdaQueryChainWrapper<SimulationJob> queryChain = simulationJobService.lambdaQuery();
// 未删除的
queryChain.eq(SimulationJob::getDelFlag,"N");
// ... 你的原有查询条件逻辑保持不变 ...
if (req.getJobName() != null && !req.getJobName().trim().isEmpty()) {
queryChain.like(SimulationJob::getJobName, req.getJobName().trim());

View File

@@ -11,7 +11,9 @@ import com.sdm.common.entity.enums.FilePermissionEnum;
import com.sdm.common.entity.enums.NodeTypeEnum;
import com.sdm.common.entity.req.data.*;
import com.sdm.common.entity.req.project.SpdmNodeListReq;
import com.sdm.common.entity.req.system.UserListReq;
import com.sdm.common.entity.req.system.UserQueryReq;
import com.sdm.common.entity.resp.PageDataResp;
import com.sdm.common.entity.resp.data.BatchCreateNormalDirResp;
import com.sdm.common.entity.resp.data.FileMetadataInfoResp;
import com.sdm.common.entity.resp.system.CIDUserResp;
@@ -604,6 +606,23 @@ public class LyricInternalServiceImpl implements ILyricInternalService {
try {
demandMapper.addDemand(demandReq, tenantId, jobNumber);
if (CollectionUtils.isNotEmpty(memberList)) {
// 将利元亨的用户工号与userId映射
Map<String, Long> usernameToUserIdMap = new HashMap<>();
UserListReq userListReq = new UserListReq();
userListReq.setTenantId(tenantId);
userListReq.setCurrent(1);
userListReq.setSize(9999);
SdmResponse<PageDataResp<List<CIDUserResp>>> pageDataRespSdmResponse = sysUserFeignClient.listUser(userListReq);
if (pageDataRespSdmResponse.isSuccess() && pageDataRespSdmResponse.getData().getData() != null) {
List<CIDUserResp> userList = pageDataRespSdmResponse.getData().getData();
usernameToUserIdMap = userList.stream().collect(Collectors.toMap(CIDUserResp::getUsername, CIDUserResp::getUserId));
}
for (SpdmDemandRelateMemberReq member : memberList) {
Long cidUserId = usernameToUserIdMap.get(String.valueOf(member.getUserId()));
if (cidUserId != null) {
member.setUserId(cidUserId);
}
}
demandMapper.addDemandMember(memberList);
}
} catch (Exception e) {

View File

@@ -3385,6 +3385,23 @@ public class NodeServiceImpl extends ServiceImpl<SimulationNodeMapper, Simulatio
ThreadLocalContext.setUserId(currentUserId);
demandMapper.addDemand(demandAddReq, tenantId, currentUserId);
if (CollectionUtils.isNotEmpty(memberList)) {
// 将利元亨的用户工号与userId映射
Map<String, Long> usernameToUserIdMap = new HashMap<>();
UserListReq userListReq = new UserListReq();
userListReq.setTenantId(tenantId);
userListReq.setCurrent(1);
userListReq.setSize(9999);
SdmResponse<PageDataResp<List<CIDUserResp>>> pageDataRespSdmResponse = sysUserFeignClient.listUser(userListReq);
if (pageDataRespSdmResponse.isSuccess() && pageDataRespSdmResponse.getData().getData() != null) {
List<CIDUserResp> userList = pageDataRespSdmResponse.getData().getData();
usernameToUserIdMap = userList.stream().collect(Collectors.toMap(CIDUserResp::getUsername, CIDUserResp::getUserId));
}
for (SpdmDemandRelateMemberReq member : memberList) {
Long cidUserId = usernameToUserIdMap.get(String.valueOf(member.getUserId()));
if (cidUserId != null) {
member.setUserId(cidUserId);
}
}
demandMapper.addDemandMember(memberList);
}