<?xml version="1.0" encoding="UTF-8"?>
<!-- Notes:
    1. Log levels and files: logging is recorded by level, each level maps to its own log file, so messages of different levels are written to different files.
    2. The log level can be configured per environment. To keep logs easy to manage and inspect, the log file path is driven by LOG_PATH, e.g. /home/<project-name>/logs.
-->
<configuration>
    <!-- Include Spring Boot's default Logback settings -->
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <!-- Character encoding -->
    <property name="ENCODING" value="UTF-8"/>
    <!-- Log file location; can be driven by logging.path in application.yml, the root defaults to the project directory -->
    <!-- <property name="LOG_HOME" value="${LOG_PATH:-./logs}"/> -->
    <property name="LOG_HOME" value="/Users/yovinchen/Desktop/project/xlcs/xlcs-parent/data/logs"/>
<property name= "LOG_FILE_MAX_SIZE" value= "100MB" />
<property name= "LOG_FILE_MAX_HISTORY" value= "180" />
<property name= "LOG_TOTAL_SIZE_CAP" value= "100GB" />
<!-- 应用名称 -->
<springProperty scope= "context" name= "APP_NAME" source= "spring.application.name" defaultValue= "springBoot" />
<!-- 常规输出格式:%d表示日期, %thread表示线程名, % - 5level: 级别从左显示5个字符宽度%msg: 日志消息, %n是换行符 -->
<property name= "NORMAL_LOG_PATTERN"
value= "%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}.%method@%line - %msg%n" />
<!-- 彩色输出格式: magenta: 洋红, boldMagenta: 粗红, yan: 青色, ·#══> -->
<property name= "CONSOLE_LOG_PATTERN"
value= "%boldMagenta([%d{yyyy-MM-dd HH:mm:ss.SSS}]) %red([%thread]) %boldMagenta(%-5level) %blue(%logger{20}.%method@%line) %magenta(·#═>) %cyan(%msg%n)" />
    <!-- ==========================Console output========================== -->
    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>${ENCODING}</charset>
        </encoder>
    </appender>
    <!-- ==========================Daily DEBUG log file========================== -->
    <appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- File naming pattern -->
            <!-- <FileNamePattern>${LOG_HOME}/${APP_NAME}/debug/${POD_NAME}-%d{yyyy-MM-dd}-%i.log</FileNamePattern> -->
            <FileNamePattern>${LOG_HOME}/${APP_NAME}/debug/${APP_NAME}-%d{yyyy-MM-dd}-%i.log</FileNamePattern>
            <!-- Maximum size per log file; a new file is started once it is exceeded (default 10MB) -->
            <maxFileSize>${LOG_FILE_MAX_SIZE}</maxFileSize>
            <!-- Number of days log files are kept (default 30) -->
            <maxHistory>${LOG_FILE_MAX_HISTORY}</maxHistory>
            <totalSizeCap>${LOG_TOTAL_SIZE_CAP}</totalSizeCap>
        </rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>DEBUG</level> <!-- Only DEBUG-level events are targeted -->
            <onMatch>ACCEPT</onMatch> <!-- Accept events at exactly DEBUG level -->
            <onMismatch>DENY</onMismatch> <!-- Deny everything else -->
        </filter>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${NORMAL_LOG_PATTERN}</pattern>
            <charset>${ENCODING}</charset>
        </encoder>
    </appender>
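    <!-- With the rolling policy above, rolled files land at paths like (illustrative, assuming APP_NAME resolves to springBoot):
         ${LOG_HOME}/springBoot/debug/springBoot-2024-01-01-0.log -->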
    <!-- ==========================Daily INFO log file========================== -->
    <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- File naming pattern -->
            <!-- <FileNamePattern>${LOG_HOME}/${APP_NAME}/info/${POD_NAME}-%d{yyyy-MM-dd}-%i.log</FileNamePattern> -->
            <FileNamePattern>${LOG_HOME}/${APP_NAME}/info/${APP_NAME}-%d{yyyy-MM-dd}-%i.log</FileNamePattern>
            <!-- Maximum size per log file; a new file is started once it is exceeded (default 10MB) -->
            <maxFileSize>${LOG_FILE_MAX_SIZE}</maxFileSize>
            <!-- Number of days log files are kept (default 30) -->
            <maxHistory>${LOG_FILE_MAX_HISTORY}</maxHistory>
            <totalSizeCap>${LOG_TOTAL_SIZE_CAP}</totalSizeCap>
        </rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>INFO</level> <!-- Only INFO-level events are targeted -->
            <onMatch>ACCEPT</onMatch> <!-- Accept events at exactly INFO level -->
            <onMismatch>DENY</onMismatch> <!-- Deny everything else -->
        </filter>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${NORMAL_LOG_PATTERN}</pattern>
            <charset>${ENCODING}</charset>
        </encoder>
    </appender>
    <!-- ==========================Daily ERROR log file========================== -->
    <appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- File naming pattern -->
            <!-- <FileNamePattern>${LOG_HOME}/${APP_NAME}/error/${POD_NAME}-%d{yyyy-MM-dd}-%i.log</FileNamePattern> -->
            <FileNamePattern>${LOG_HOME}/${APP_NAME}/error/${APP_NAME}-%d{yyyy-MM-dd}-%i.log</FileNamePattern>
            <!-- Maximum size per log file; a new file is started once it is exceeded (default 10MB) -->
            <maxFileSize>${LOG_FILE_MAX_SIZE}</maxFileSize>
            <!-- Number of days log files are kept (default 30) -->
            <maxHistory>${LOG_FILE_MAX_HISTORY}</maxHistory>
            <totalSizeCap>${LOG_TOTAL_SIZE_CAP}</totalSizeCap>
        </rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level> <!-- Only ERROR-level events are targeted -->
            <onMatch>ACCEPT</onMatch> <!-- Accept events at exactly ERROR level -->
            <onMismatch>DENY</onMismatch> <!-- Deny everything else -->
        </filter>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${NORMAL_LOG_PATTERN}</pattern>
            <charset>${ENCODING}</charset>
        </encoder>
    </appender>
    <!-- Druid slow-SQL log output; if Druid monitoring is not used, remove this appender and the related logger below -->
    <appender name="DRUID_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- File naming pattern -->
            <!-- <FileNamePattern>${LOG_HOME}/${APP_NAME}/druid/SlowSql_${POD_NAME}-%d{yyyy-MM-dd}-%i.log</FileNamePattern> -->
            <FileNamePattern>${LOG_HOME}/${APP_NAME}/druid/SlowSql_${APP_NAME}-%d{yyyy-MM-dd}-%i.log</FileNamePattern>
            <!-- Maximum size per log file; a new file is started once it is exceeded (default 10MB) -->
            <maxFileSize>${LOG_FILE_MAX_SIZE}</maxFileSize>
            <!-- Number of days log files are kept (default 30) -->
            <maxHistory>${LOG_FILE_MAX_HISTORY}</maxHistory>
            <totalSizeCap>${LOG_TOTAL_SIZE_CAP}</totalSizeCap>
        </rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level> <!-- Only ERROR-level events are targeted -->
            <onMatch>ACCEPT</onMatch> <!-- Accept events at exactly ERROR level -->
            <onMismatch>DENY</onMismatch> <!-- Deny everything else -->
        </filter>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${NORMAL_LOG_PATTERN}</pattern>
            <charset>${ENCODING}</charset>
        </encoder>
    </appender>
    <!-- ===Log output levels: OFF > ERROR > WARN > INFO > DEBUG > TRACE > ALL=== -->
    <logger name="com.asiainfo" level="INFO"/>
    <logger name="org.springframework.boot.web.embedded.tomcat.TomcatWebServer" level="INFO"/>
    <logger name="org.springframework" level="WARN"/>
    <logger name="com.baomidou.mybatisplus" level="WARN"/>
    <logger name="org.apache.kafka.clients.NetworkClient" level="INFO"/>
    <logger name="org.apache.kafka.clients.consumer.ConsumerConfig" level="INFO"/>
    <!-- Druid-related logger -->
    <logger name="com.alibaba.druid.filter.stat.StatFilter" level="ERROR">
        <appender-ref ref="DRUID_FILE"/>
        <appender-ref ref="CONSOLE"/>
    </logger>
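    <!-- The StatFilter logger above only produces slow-SQL entries if slow-SQL logging is enabled on the
         Druid data source. A sketch, assuming the druid-spring-boot-starter properties in application.yml:
         spring.datasource.druid.filter.stat.log-slow-sql: true
         spring.datasource.druid.filter.stat.slow-sql-millis: 2000 -->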
<appender name= "GELF" class= "biz.paluch.logging.gelf.logback.GelfLogbackAppender" >
<!-- GrayLog服务地址 -->
<host > udp:124.70.87.134</host>
<!-- GrayLog服务端口 -->
<port > 12201</port>
<version > 1.1</version>
<!-- 当前服务名称 -->
<facility > ${APP_NAME}</facility>
<extractStackTrace > true</extractStackTrace>
<filterStackTrace > true</filterStackTrace>
<mdcProfiling > true</mdcProfiling>
<timestampPattern > yyyy-MM-dd HH:mm:ss,SSS</timestampPattern>
<maximumMessageSize > 8192</maximumMessageSize>
</appender>
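    <!-- The GELF appender requires the logstash-gelf library on the classpath; a sketch of the Maven
         dependency (the version shown is an assumption, use whatever the project already pins):
         <dependency>
             <groupId>biz.paluch.logging</groupId>
             <artifactId>logstash-gelf</artifactId>
             <version>1.15.0</version>
         </dependency> -->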
    <!-- ======Development environment: log to console and files====== -->
    <springProfile name="dev"> <!-- Selected by spring.profiles.active in application.yml -->
        <root level="INFO">
            <appender-ref ref="CONSOLE"/>
            <appender-ref ref="INFO_FILE"/>
            <appender-ref ref="ERROR_FILE"/>
            <appender-ref ref="GELF"/>
        </root>
    </springProfile>
    <!-- ======Test environment: log to console and files====== -->
    <springProfile name="test"> <!-- Selected by spring.profiles.active in application.yml -->
        <root level="INFO">
            <appender-ref ref="CONSOLE"/>
            <!-- Note: with the root at INFO, DEBUG_FILE receives no events unless the root (or a logger) is set to DEBUG -->
            <appender-ref ref="DEBUG_FILE"/>
            <appender-ref ref="INFO_FILE"/>
            <appender-ref ref="ERROR_FILE"/>
            <appender-ref ref="GELF"/>
        </root>
    </springProfile>
    <!-- ======Production environment: log to console and files====== -->
    <springProfile name="prod"> <!-- Selected by spring.profiles.active in application.yml -->
        <root level="INFO">
            <appender-ref ref="CONSOLE"/>
            <appender-ref ref="DEBUG_FILE"/>
            <appender-ref ref="INFO_FILE"/>
            <appender-ref ref="ERROR_FILE"/>
            <appender-ref ref="GELF"/>
        </root>
    </springProfile>
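    <!-- The profile blocks above are activated by spring.profiles.active, e.g. in application.yml:
         spring:
           profiles:
             active: dev -->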
</configuration>