This commit is contained in:
2026-04-01 13:39:00 +08:00
10 changed files with 250 additions and 215 deletions

View File

@@ -1,55 +1,56 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- 彩色日志 -->
<!-- 彩色日志依赖的渲染类 -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<!-- 彩色日志格式 -->
<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr([%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}" />
<!-- 普通日志格式(无颜色) -->
<property name="FILE_LOG_PATTERN" value="[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %logger : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />
<!--定义日志文件存储地址 勿在 LogBack 的配置中使用相对路径-->
<!-- 日志文件存储地址 -->
<property name="LOG_HOME" value="/opt/sdm/capability/log" />
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期%thread表示线程名%-5level级别从左显示5个字符宽度%msg日志消息%n是换行符-->
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- 按照每天生成日志文件 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/console.log.%d{yyyy-MM-dd}.log</FileNamePattern>
<!--日志文件保留天数-->
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<!-- ===================== console.log 输出3GB上限 ===================== -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/console.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- 自动压缩 -->
<FileNamePattern>${LOG_HOME}/console.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<!-- 单个文件 100MB -->
<maxFileSize>100MB</maxFileSize>
<!-- 保存 15 天 -->
<MaxHistory>15</MaxHistory>
<!-- 总大小 3GB -->
<TotalSizeCap>3GB</TotalSizeCap>
<cleanHistoryOnStart>true</cleanHistoryOnStart>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期%thread表示线程名%-5level级别从左显示5个字符宽度%msg日志消息%n是换行符-->
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
<!--日志文件最大的大小-->
<!-- <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">-->
<!-- <MaxFileSize>10MB</MaxFileSize>-->
<!-- </triggeringPolicy>-->
</appender>
<!-- 4. core.log 专用输出器(保留 callerInfo 格式) -->
<!-- ===================== core.log 专用输出2GB上限 ===================== -->
<appender name="CORE_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/core.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<!-- 自动压缩 -->
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<!-- 总大小 2GB -->
<TotalSizeCap>2GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- 仅 core.log 显示真实调用位置(类名.方法名(行号) -->
<pattern>[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %X{callerInfo} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
@@ -63,23 +64,22 @@
<appender-ref ref="FILE" />
</root>
<!-- 绑定 FeignClient → 输出到日志文件 -->
<!-- 绑定 FeignClient -->
<logger name="FeignClient" level="INFO" additivity="false">
<appender-ref ref="FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- 绑定 coreLogger → 输出到 core.log + 控制台 -->
<!-- 绑定 coreLogger -->
<logger name="coreLogger" level="INFO" additivity="false">
<appender-ref ref="CORE_FILE" /> <!-- 核心日志写入 core.log -->
<appender-ref ref="STDOUT" /> <!-- 同时输出到控制台(显示 CoreLogger -->
<appender-ref ref="CORE_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- ===================== 你原来的所有 SQL/MyBatis/包日志 全部保留 ===================== -->
<!-- MyBatis SQL语句输出配置 -->
<logger name="org.apache.ibatis" level="DEBUG"/>
<logger name="org.apache.ibatis.session.AutoMappingUnknownColumnBehavior" level="ERROR"/>
<!-- MyBatis SQL语句详细输出 -->
<logger name="org.apache.ibatis.logging" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc.BaseJdbcLogger" level="DEBUG"/>
@@ -110,4 +110,5 @@
<logger name="com.sdm.capability" level="INFO"/>
<logger name="com.sdm.capability.dao" level="DEBUG"/>
</configuration>

View File

@@ -13,62 +13,59 @@
<!-- 日志文件存储地址 -->
<property name="LOG_HOME" value="/home/app/data/logs" />
<!-- 1. 控制台输出默认输出DEBUG及以上级别 -->
<!-- 1. 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- 2. INFO级别日志输出到running.log仅包含INFO及以上不包含DEBUG -->
<!-- 2. INFO级别日志输出到 running.log -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/running.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>3GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
<!-- 过滤只接受INFO及以上级别INFO, WARN, ERROR排除DEBUG -->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
</appender>
<!-- 3. DEBUG级别日志输出到running_debug.log仅包含DEBUG级别 -->
<!-- 3. DEBUG级别日志输出到 running_debug.log -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/running_debug.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/running_debug.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/running_debug.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>2GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
<!-- 过滤只接受DEBUG级别 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>DEBUG</level>
<onMatch>ACCEPT</onMatch> <!-- 匹配DEBUG级别则接受 -->
<onMismatch>DENY</onMismatch> <!-- 不匹配则拒绝 -->
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 4. core.log 专用输出器(保留 callerInfo 格式) -->
<!-- 4. core.log 专用输出器 -->
<appender name="CORE_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/core.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>1GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- 仅 core.log 显示真实调用位置(类名.方法名(行号) -->
<pattern>[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %X{callerInfo} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
@@ -76,31 +73,29 @@
</filter>
</appender>
<!-- 全局日志级别设置为DEBUG确保DEBUG日志能被捕获 -->
<!-- 全局日志 -->
<root level="INFO">
<appender-ref ref="STDOUT" /> <!-- 控制台输出DEBUG及以上 -->
<appender-ref ref="INFO_FILE" /> <!-- INFO及以上输出到running.log -->
<appender-ref ref="DEBUG_FILE" /> <!-- 仅DEBUG输出到running_debug.log -->
<appender-ref ref="STDOUT" />
<appender-ref ref="INFO_FILE" />
<appender-ref ref="DEBUG_FILE" />
</root>
<!-- 绑定 FeignClient → 输出到日志文件 -->
<!-- 绑定 FeignClient -->
<logger name="FeignClient" level="INFO" additivity="false">
<appender-ref ref="INFO_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- 绑定 coreLogger → 输出到 core.log + 控制台 -->
<!-- 绑定 coreLogger -->
<logger name="coreLogger" level="INFO" additivity="false">
<appender-ref ref="CORE_FILE" /> <!-- 核心日志写入 core.log -->
<appender-ref ref="STDOUT" /> <!-- 同时输出到控制台(显示 CoreLogger -->
<appender-ref ref="CORE_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- ===================== 你原来的所有 SQL/MyBatis/包日志 全部保留 ===================== -->
<!-- MyBatis SQL语句输出配置 -->
<logger name="org.apache.ibatis" level="DEBUG"/>
<logger name="org.apache.ibatis.session.AutoMappingUnknownColumnBehavior" level="ERROR"/>
<!-- MyBatis SQL语句详细输出 -->
<logger name="org.apache.ibatis.logging" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc.BaseJdbcLogger" level="DEBUG"/>
@@ -131,4 +126,5 @@
<logger name="com.sdm.data" level="INFO"/>
<logger name="com.sdm.data.dao" level="DEBUG"/>
</configuration>

View File

@@ -13,62 +13,59 @@
<!-- 日志文件存储地址 -->
<property name="LOG_HOME" value="/home/app/flowable/logs" />
<!-- 1. 控制台输出默认输出DEBUG及以上级别 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<!-- 1. 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- 2. INFO级别日志输出到running.log仅包含INFO及以上不包含DEBUG -->
<!-- 2. INFO级别日志running.log -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/running.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>3GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<encoder>
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
<!-- 过滤只接受INFO及以上级别INFO, WARN, ERROR排除DEBUG -->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
</appender>
<!-- 3. DEBUG级别日志输出到running_debug.log仅包含DEBUG级别 -->
<!-- 3. DEBUG级别日志running_debug.log -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/running_debug.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/running_debug.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/running_debug.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>2GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<encoder>
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
<!-- 过滤只接受DEBUG级别 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>DEBUG</level>
<onMatch>ACCEPT</onMatch> <!-- 匹配DEBUG级别则接受 -->
<onMismatch>DENY</onMismatch> <!-- 不匹配则拒绝 -->
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 4. core.log 专用输出器(保留 callerInfo 格式) -->
<!-- 4. core.log 专用输出器 -->
<appender name="CORE_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/core.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>1GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- 仅 core.log 显示真实调用位置(类名.方法名(行号) -->
<encoder>
<pattern>[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %X{callerInfo} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
@@ -76,26 +73,26 @@
</filter>
</appender>
<!-- 全局日志级别设置为DEBUG确保DEBUG日志能被捕获 -->
<!-- 根日志 -->
<root level="INFO">
<appender-ref ref="STDOUT" /> <!-- 控制台输出DEBUG及以上 -->
<appender-ref ref="INFO_FILE" /> <!-- INFO及以上输出到running.log -->
<appender-ref ref="DEBUG_FILE" /> <!-- 仅DEBUG输出到running_debug.log -->
<appender-ref ref="STDOUT" />
<appender-ref ref="INFO_FILE" />
<appender-ref ref="DEBUG_FILE" />
</root>
<!-- 绑定 FeignClient → 输出到日志文件 -->
<!-- FeignClient 日志 -->
<logger name="FeignClient" level="INFO" additivity="false">
<appender-ref ref="INFO_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- 绑定 coreLogger → 输出到 core.log + 控制台 -->
<!-- coreLogger 日志 -->
<logger name="coreLogger" level="INFO" additivity="false">
<appender-ref ref="CORE_FILE" /> <!-- 核心日志写入 core.log -->
<appender-ref ref="STDOUT" /> <!-- 同时输出到控制台(显示 CoreLogger -->
<appender-ref ref="CORE_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- 业务包日志 -->
<logger name="com.sdm.flowable" level="INFO"/>
</configuration>

View File

@@ -1,52 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- 彩色日志 -->
<!-- 彩色日志依赖的渲染类 -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<!-- 彩色日志格式 -->
<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr([%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}" />
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径-->
<property name="FILE_LOG_PATTERN" value="[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %logger : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />
<!-- 日志文件存储地址 -->
<property name="LOG_HOME" value="/home/app/pbs/logs" />
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期%thread表示线程名%-5level级别从左显示5个字符宽度%msg日志消息%n是换行符-->
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- 按照每天生成日志文件 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/console.log.%d{yyyy-MM-dd}.log</FileNamePattern>
<!--日志文件保留天数-->
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<!-- ===================== console.log 输出3GB上限 ===================== -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/console.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/console.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>3GB</TotalSizeCap>
<cleanHistoryOnStart>true</cleanHistoryOnStart>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期%thread表示线程名%-5level级别从左显示5个字符宽度%msg日志消息%n是换行符-->
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
<!--日志文件最大的大小-->
<!-- <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">-->
<!-- <MaxFileSize>10MB</MaxFileSize>-->
<!-- </triggeringPolicy>-->
</appender>
<!-- 4. core.log 专用输出器(保留 callerInfo 格式) -->
<!-- ===================== core.log 专用输出2GB上限 ===================== -->
<appender name="CORE_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/core.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>2GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- 仅 core.log 显示真实调用位置(类名.方法名(行号) -->
<pattern>[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %X{callerInfo} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
@@ -60,23 +57,22 @@
<appender-ref ref="FILE" />
</root>
<!-- 绑定 FeignClient → 输出到日志文件 -->
<!-- 绑定 FeignClient -->
<logger name="FeignClient" level="INFO" additivity="false">
<appender-ref ref="FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- 绑定 coreLogger → 输出到 core.log + 控制台 -->
<!-- 绑定 coreLogger -->
<logger name="coreLogger" level="INFO" additivity="false">
<appender-ref ref="CORE_FILE" /> <!-- 核心日志写入 core.log -->
<appender-ref ref="STDOUT" /> <!-- 同时输出到控制台(显示 CoreLogger -->
<appender-ref ref="CORE_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- ===================== 你原来的所有 SQL/MyBatis/包日志 全部保留 ===================== -->
<!-- MyBatis SQL语句输出配置 -->
<logger name="org.apache.ibatis" level="DEBUG"/>
<logger name="org.apache.ibatis.session.AutoMappingUnknownColumnBehavior" level="ERROR"/>
<!-- MyBatis SQL语句详细输出 -->
<logger name="org.apache.ibatis.logging" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc.BaseJdbcLogger" level="DEBUG"/>
@@ -107,4 +103,5 @@
<logger name="com.sdm.pbs" level="INFO"/>
<logger name="com.sdm.pbs.dao" level="DEBUG"/>
</configuration>

View File

@@ -283,6 +283,16 @@ public class SimulationNodeController implements ISimulationNodeFeignClient {
return nodeService.getTaskCompleteStatisticsByDiscipline(req);
}
/**
 * Task-achievement statistics grouped by discipline.
 * Note: discipline is an attribute of the task itself, not of the node.
 */
@PostMapping("/getTaskAchieveStatisticsByDiscipline")
@Operation(summary = "基于 学科学科不是节点是task的一个属性查询任务达成统计", description = "仿真任务达成统计")
public SdmResponse getTaskAchieveStatisticsByDiscipline(@RequestBody @Validated CommonGetCompleteStatisticsReq req) {
// Force the tenant id from the thread-local context so callers cannot spoof it.
req.setTenantId(ThreadLocalContext.getTenantId());
return nodeService.getTaskAchieveStatisticsByDiscipline(req);
}
/** 基于 学科学科不是节点是task的一个属性查询指标完成统计
* 需要传:
* tag1-tag10 标签

View File

@@ -59,6 +59,8 @@ public interface INodeService extends IService<SimulationNode> {
SdmResponse getTaskCompleteStatisticsByDiscipline(CommonGetCompleteStatisticsReq req);
SdmResponse getTaskAchieveStatisticsByDiscipline(CommonGetCompleteStatisticsReq req);
SdmResponse getPerformanceCompleteStatisticsByDiscipline(CommonGetCompleteStatisticsReq req);
// 工位评审通过统计查询

View File

@@ -3135,6 +3135,73 @@ public class NodeServiceImpl extends ServiceImpl<SimulationNodeMapper, Simulatio
return buildStatisticsResponse(statisticsMap, allExeStatus, "allExeStatus");
}
@Override
public SdmResponse getTaskAchieveStatisticsByDiscipline(CommonGetCompleteStatisticsReq req) {
    // Base query: current tenant, non-empty discipline, non-null execution status.
    var query = simulationTaskService.lambdaQuery()
            .eq(SimulationTask::getTenantId, ThreadLocalContext.getTenantId())
            .isNotNull(SimulationTask::getDiscipline)
            .ne(SimulationTask::getDiscipline, "")
            .isNotNull(SimulationTask::getExeStatus);
    // discipline supports comma-separated multi-values.
    if (StringUtils.isNotBlank(req.getDiscipline())) {
        String[] disciplines = req.getDiscipline().split(",");
        query.in(SimulationTask::getDiscipline, Arrays.asList(disciplines));
    }
    // tag1-tag10 filters: each accepts a comma-separated list of IDs.
    if (StringUtils.isNotBlank(req.getTag1())) {
        String[] tag1Values = req.getTag1().split(",");
        query.in(SimulationTask::getTag1, Arrays.asList(tag1Values));
    }
    if (StringUtils.isNotBlank(req.getTag2())) {
        String[] tag2Values = req.getTag2().split(",");
        query.in(SimulationTask::getTag2, Arrays.asList(tag2Values));
    }
    if (StringUtils.isNotBlank(req.getTag3())) {
        String[] tag3Values = req.getTag3().split(",");
        query.in(SimulationTask::getTag3, Arrays.asList(tag3Values));
    }
    if (StringUtils.isNotBlank(req.getTag4())) {
        String[] tag4Values = req.getTag4().split(",");
        query.in(SimulationTask::getTag4, Arrays.asList(tag4Values));
    }
    if (StringUtils.isNotBlank(req.getTag5())) {
        String[] tag5Values = req.getTag5().split(",");
        query.in(SimulationTask::getTag5, Arrays.asList(tag5Values));
    }
    if (StringUtils.isNotBlank(req.getTag6())) {
        String[] tag6Values = req.getTag6().split(",");
        query.in(SimulationTask::getTag6, Arrays.asList(tag6Values));
    }
    if (StringUtils.isNotBlank(req.getTag7())) {
        String[] tag7Values = req.getTag7().split(",");
        query.in(SimulationTask::getTag7, Arrays.asList(tag7Values));
    }
    // BUGFIX: tag8 was previously skipped (jumped from tag7 straight to tag9),
    // so a tag8 filter sent by callers was silently ignored.
    if (StringUtils.isNotBlank(req.getTag8())) {
        String[] tag8Values = req.getTag8().split(",");
        query.in(SimulationTask::getTag8, Arrays.asList(tag8Values));
    }
    if (StringUtils.isNotBlank(req.getTag9())) {
        String[] tag9Values = req.getTag9().split(",");
        query.in(SimulationTask::getTag9, Arrays.asList(tag9Values));
    }
    if (StringUtils.isNotBlank(req.getTag10())) {
        String[] tag10Values = req.getTag10().split(",");
        query.in(SimulationTask::getTag10, Arrays.asList(tag10Values));
    }
    List<SimulationTask> simulationTasks = query.list();
    // Group by discipline, tallying the achieve-status distribution per group.
    // allExeStatus collects every distinct status seen, for the response header.
    Set<String> allExeStatus = new HashSet<>();
    Map<String, CommonStatisticsVo> statisticsMap = buildCommonStatistics(
            simulationTasks,
            SimulationTask::getDiscipline,
            SimulationTask::getAchieveStatus,
            allExeStatus
    );
    return buildStatisticsResponse(statisticsMap, allExeStatus, "allExeStatus");
}
public SdmResponse getPerformanceCompleteStatisticsByDiscipline(CommonGetCompleteStatisticsReq req){
List<PerformanceCompleteFromDisciplineVo> performanceList = this.baseMapper.getPerformanceCompleteStatisticsByDiscipline(req);
// 按学科(discipline)分组统计指标完成状态

View File

@@ -13,62 +13,59 @@
<!-- 日志文件存储地址 -->
<property name="LOG_HOME" value="/home/app/project/logs" />
<!-- 1. 控制台输出默认输出DEBUG及以上级别 -->
<!-- 1. 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- 2. INFO级别日志输出到running.log仅包含INFO及以上不包含DEBUG -->
<!-- 2. INFO级别日志running.log 3GB -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/running.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>3GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
<!-- 过滤只接受INFO及以上级别INFO, WARN, ERROR排除DEBUG -->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
</appender>
<!-- 3. DEBUG级别日志输出到running_debug.log仅包含DEBUG级别 -->
<!-- 3. DEBUG级别日志running_debug.log 2GB -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/running_debug.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/running_debug.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/running_debug.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>2GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
<!-- 过滤只接受DEBUG级别 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>DEBUG</level>
<onMatch>ACCEPT</onMatch> <!-- 匹配DEBUG级别则接受 -->
<onMismatch>DENY</onMismatch> <!-- 不匹配则拒绝 -->
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 4. core.log 专用输出器(保留 callerInfo 格式) -->
<!-- 4. core.log 专用输出器 → 1GB -->
<appender name="CORE_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/core.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>1GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- 仅 core.log 显示真实调用位置(类名.方法名(行号) -->
<pattern>[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %X{callerInfo} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
@@ -76,31 +73,29 @@
</filter>
</appender>
<!-- 全局日志级别设置为DEBUG确保DEBUG日志能被捕获 -->
<!-- 全局日志 -->
<root level="INFO">
<appender-ref ref="STDOUT" /> <!-- 控制台输出DEBUG及以上 -->
<appender-ref ref="INFO_FILE" /> <!-- INFO及以上输出到running.log -->
<appender-ref ref="DEBUG_FILE" /> <!-- 仅DEBUG输出到running_debug.log -->
<appender-ref ref="STDOUT" />
<appender-ref ref="INFO_FILE" />
<appender-ref ref="DEBUG_FILE" />
</root>
<!-- 绑定 FeignClient → 输出到日志文件 -->
<!-- FeignClient -->
<logger name="FeignClient" level="INFO" additivity="false">
<appender-ref ref="INFO_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- 绑定 coreLogger → 输出到 core.log + 控制台 -->
<!-- coreLogger -->
<logger name="coreLogger" level="INFO" additivity="false">
<appender-ref ref="CORE_FILE" /> <!-- 核心日志写入 core.log -->
<appender-ref ref="STDOUT" /> <!-- 同时输出到控制台(显示 CoreLogger -->
<appender-ref ref="CORE_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- ===================== 你所有原有日志配置 100% 保留 ===================== -->
<!-- MyBatis SQL语句输出配置 -->
<logger name="org.apache.ibatis" level="DEBUG"/>
<logger name="org.apache.ibatis.session.AutoMappingUnknownColumnBehavior" level="ERROR"/>
<!-- MyBatis SQL语句详细输出 -->
<logger name="org.apache.ibatis.logging" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc.BaseJdbcLogger" level="DEBUG"/>
@@ -131,7 +126,5 @@
<logger name="com.sdm.project" level="INFO"/>
<logger name="com.sdm.project.dao" level="DEBUG"/>
<!-- 新增com.sdm.outbridge 包日志级别配置 -->
<logger name="com.sdm.outbridge" level="INFO"/>
<logger name="com.sdm.project.dao" level="DEBUG"/>
</configuration>

View File

@@ -20,42 +20,30 @@
</encoder>
</appender>
<!-- 文件输出当前日志为running.log按大小和时间滚动 -->
<!-- ===================== running.log 输出3GB上限 ===================== -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 当前活动日志文件名称始终为running.log -->
<file>${LOG_HOME}/running.log</file>
<!-- 滚动策略:结合时间和大小 -->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- 归档文件命名格式running.log.年--日.序号 -->
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- 日志保留天数 -->
<MaxHistory>30</MaxHistory>
<!-- 所有归档文件的总大小上限 -->
<TotalSizeCap>500MB</TotalSizeCap>
<!-- 单个文件大小上限(达到后触发滚动) -->
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>3GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- 4. core.log 专用输出器(保留 callerInfo 格式) -->
<!-- ===================== core.log 专用输出2GB上限 ===================== -->
<appender name="CORE_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/core.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>2GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- 仅 core.log 显示真实调用位置(类名.方法名(行号) -->
<pattern>[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %X{callerInfo} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
@@ -69,23 +57,22 @@
<appender-ref ref="FILE" />
</root>
<!-- 绑定 FeignClient → 输出到日志文件 -->
<!-- 绑定 FeignClient -->
<logger name="FeignClient" level="INFO" additivity="false">
<appender-ref ref="FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- 绑定 coreLogger → 输出到 core.log + 控制台 -->
<!-- 绑定 coreLogger -->
<logger name="coreLogger" level="INFO" additivity="false">
<appender-ref ref="CORE_FILE" /> <!-- 核心日志写入 core.log -->
<appender-ref ref="STDOUT" /> <!-- 同时输出到控制台(显示 CoreLogger -->
<appender-ref ref="CORE_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- ===================== 你所有原有日志配置 100% 保留 ===================== -->
<!-- MyBatis SQL语句输出配置 -->
<logger name="org.apache.ibatis" level="DEBUG"/>
<logger name="org.apache.ibatis.session.AutoMappingUnknownColumnBehavior" level="ERROR"/>
<!-- MyBatis SQL语句详细输出 -->
<logger name="org.apache.ibatis.logging" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc.BaseJdbcLogger" level="DEBUG"/>
@@ -115,7 +102,6 @@
<logger name="java.sql.ResultSet" level="DEBUG"/>
<logger name="com.sdm.system" level="INFO"/>
<!-- 新增com.sdm.outbridge 包日志级别配置 -->
<logger name="com.sdm.outbridge" level="INFO"/>
<logger name="com.sdm.system.dao" level="DEBUG"/>
</configuration>

View File

@@ -20,42 +20,30 @@
</encoder>
</appender>
<!-- 文件输出当前日志为running.log按大小和时间滚动 -->
<!-- ===================== running.log 输出3GB上限 ===================== -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 当前活动日志文件名称始终为running.log -->
<file>${LOG_HOME}/running.log</file>
<!-- 滚动策略:结合时间和大小 -->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- 归档文件命名格式running.log.年--日.序号 -->
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- 日志保留天数 -->
<MaxHistory>30</MaxHistory>
<!-- 所有归档文件的总大小上限 -->
<TotalSizeCap>500MB</TotalSizeCap>
<!-- 单个文件大小上限(达到后触发滚动) -->
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/running.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>3GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- 4. core.log 专用输出器(保留 callerInfo 格式) -->
<!-- ===================== core.log 专用输出2GB上限 ===================== -->
<appender name="CORE_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_HOME}/core.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<MaxHistory>30</MaxHistory>
<TotalSizeCap>500MB</TotalSizeCap>
<maxFileSize>10MB</maxFileSize>
<FileNamePattern>${LOG_HOME}/core.log.%d{yyyy-MM-dd}.%i.log.gz</FileNamePattern>
<maxFileSize>100MB</maxFileSize>
<MaxHistory>15</MaxHistory>
<TotalSizeCap>2GB</TotalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- 仅 core.log 显示真实调用位置(类名.方法名(行号) -->
<pattern>[%X{traceId}] %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${PID:- } [%15.15t] %X{callerInfo} : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
@@ -69,24 +57,22 @@
<appender-ref ref="FILE" />
</root>
<!-- 绑定 FeignClient → 输出到日志文件 -->
<!-- 绑定 FeignClient -->
<logger name="FeignClient" level="INFO" additivity="false">
<appender-ref ref="FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- 绑定 coreLogger → 输出到 core.log + 控制台 -->
<!-- 绑定 coreLogger -->
<logger name="coreLogger" level="INFO" additivity="false">
<appender-ref ref="CORE_FILE" /> <!-- 核心日志写入 core.log -->
<appender-ref ref="STDOUT" /> <!-- 同时输出到控制台(显示 CoreLogger -->
<appender-ref ref="CORE_FILE" />
<appender-ref ref="STDOUT" />
</logger>
<!-- ===================== 你所有原有日志配置 100% 保留 ===================== -->
<!-- MyBatis SQL语句输出配置 -->
<logger name="org.apache.ibatis" level="DEBUG"/>
<logger name="org.apache.ibatis.session.AutoMappingUnknownColumnBehavior" level="ERROR"/>
<!-- MyBatis SQL语句详细输出 -->
<logger name="org.apache.ibatis.logging" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc" level="DEBUG"/>
<logger name="org.apache.ibatis.logging.jdbc.BaseJdbcLogger" level="DEBUG"/>