日志链路追踪

39 阅读1分钟

1、引入依赖

<!--SkyWalKing日志采集-->
<skywalking-version>8.7.0</skywalking-version>

<!--SkyWalKing日志采集-->
<dependency>
    <groupId>org.apache.skywalking</groupId>
    <artifactId>apm-toolkit-trace</artifactId>
    <version>${skywalking-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.skywalking</groupId>
    <!-- 第3步配置的是 log4j2,需使用 log4j-2.x 工具包(logback 工具包对 log4j2 不生效) -->
    <artifactId>apm-toolkit-log4j-2.x</artifactId>
    <version>${skywalking-version}</version>
</dependency>

2、拦截器设置traceId

package com.hntec.nucs.pay.center.config;

import lombok.extern.slf4j.Slf4j;
import org.slf4j.MDC;
import org.springframework.stereotype.Component;
import org.springframework.web.filter.AbstractRequestLoggingFilter;

import javax.servlet.http.HttpServletRequest;
import java.util.UUID;

@Component
@Slf4j
public class TraceFilter extends AbstractRequestLoggingFilter {

    /** MDC key referenced by the log pattern as %X{traceId}. */
    private static final String TRACE_ID_KEY = "traceId";

    /**
     * Generates a fresh trace id for the incoming request and publishes it
     * to the logging MDC so every log line of this request carries it.
     *
     * @param request current HTTP request (unused; id is purely random)
     * @param message framework-built description of the request (unused)
     */
    @Override
    protected void beforeRequest(HttpServletRequest request, String message) {
        String traceId = UUID.randomUUID().toString().replace("-", "");
        MDC.put(TRACE_ID_KEY, traceId);
        // Diagnostic only — log at DEBUG, not INFO, to avoid one extra line per request in prod.
        log.debug("Current traceId: {}", traceId);
    }

    /**
     * Clears the trace id after the request completes so the pooled worker
     * thread does not leak it into the next request.
     *
     * @param request current HTTP request (unused)
     * @param message framework-built description of the request (unused)
     */
    @Override
    protected void afterRequest(HttpServletRequest request, String message) {
        // Remove only our own key; MDC.clear() would also wipe entries
        // placed by other filters/interceptors on the same thread.
        MDC.remove(TRACE_ID_KEY);
    }
}

3、配置log4j2.xml,链路主要是%X{traceId}配置,行号%L,要设置 includeLocation="true"才生效。

<?xml version="1.0" encoding="UTF-8"?>
<!--日志级别以及优先级排序: OFF > FATAL > ERROR > WARN > INFO > DEBUG > TRACE > ALL -->
<!-- Configuration 后面的 status 用于设置 log4j2 自身内部的信息输出,可以不设置,当设置成 trace 时,可以看到 log4j2 内部各种详细输出-->
<configuration status="INFO">

    <!-- 日志文件目录和压缩文件目录配置 -->
    <Properties>
        <Property name="fileName">logs/nucs-market</Property>
        <Property name="fileGz">logs/nucs-market</Property>
        <property name="PATTERN">%d [%X{traceId}]-%-5level [%t] %logger{1.}:[%L] -%msg%n
        </property>
        <!--<property name="PATTERN">%highlight{%d  %-5level [%22t] %logger{1.}:%L - %msg%n}{%throwable{short.fileName}}{FATAL=white,ERROR=red, WARN=blue, INFO=Normal, DEBUG=green, TRACE=blue}</property>-->
        <!--<property name="PATTERN">%highlight{%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n}</property>-->
    </Properties>

    <!--先定义所有的 appender-->
    <Appenders>
        <!-- 控制台输出(行号 %L 由 Logger 上的 includeLocation="true" 控制,见下方 Loggers) -->
        <Console name="console" target="SYSTEM_OUT">
            <PatternLayout pattern="${PATTERN}" />
        </Console>

        <!-- 打印出所有的信息,每次大小超过size,则这size大小的日志会自动存入按年份-月份建立的文件夹下面并进行压缩,作为存档 -->
        <RollingRandomAccessFile name="infoFile" fileName="${fileName}/nucs-market-info.log"
                                 immediateFlush="false"
                                 filePattern="${fileGz}/$${date:yyyy}/$${date:yyyy-MM}/%d{yyyy-MM-dd-}-%i.nucs-market-info.gz">
            <PatternLayout pattern="${PATTERN}"/>

            <Policies>
                <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>

            <Filters>
                <!-- 只记录 info/warn:先拒绝 error 及以上,再接受 info 及以上。
                     注意顺序:若先写 ACCEPT 低级别,后面的过滤器将永远不会执行,error 会混入 info 文件 -->
                <ThresholdFilter level="error" onMatch="DENY" onMismatch="NEUTRAL"/>
                <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="DENY"/>
            </Filters>

            <!-- 指定每天的最大压缩包个数,默认7个,超过了会覆盖之前的 -->
            <DefaultRolloverStrategy max="1000"/>
        </RollingRandomAccessFile>

        <!-- 存储所有error信息 -->
        <RollingRandomAccessFile name="errorFile" fileName="${fileName}/nucs-market-error.log"
                                 immediateFlush="false"
                                 filePattern="${fileGz}/$${date:yyyy}/$${date:yyyy-MM}/%d{yyyy-MM-dd-}-%i.nucs-market-error.gz">
            <PatternLayout pattern="${PATTERN}"/>

            <Policies>
                <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>

            <Filters>
                <!-- 只记录error级别信息 -->
                <ThresholdFilter level="error" onMatch="ACCEPT" onMismatch="DENY"/>
            </Filters>

            <!-- 指定每天的最大压缩包个数,默认7个,超过了会覆盖之前的 -->
            <DefaultRolloverStrategy max="1000"/>
        </RollingRandomAccessFile>
    </Appenders>
    <!--然后定义 logger,只有定义了 logger 并引入的 appender,appender 才会生效-->
    <!--root:用于指定项目的根日志,如果没有单独指定 Logger,则会使用 root 作为默认的日志输出-->

    <Loggers>

        <!-- 添加 includeLocation 属性 -->
        <AsyncLogger name="org.springframework" level="info" includeLocation="true"/>
        <AsyncLogger name="serviceStatsLog" level="error" includeLocation="true"/>
        <AsyncLogger name="info" level="error" includeLocation="true"/>
        <AsyncLogger name="org.apache.kafka" level="INFO" includeLocation="true"/>
        <AsyncLogger name="druid.sql.Statement" level="DEBUG" includeLocation="true"/>

        <!-- Root Logger 也需要添加 includeLocation -->
        <AsyncRoot level="info" includeLocation="true">
            <AppenderRef ref="console"/>
            <AppenderRef ref="infoFile"/>
            <AppenderRef ref="errorFile"/>
        </AsyncRoot>
    </Loggers>
</configuration>