作为后端开发人员都知道记录日志是非常必要的。当程序出现问题时,进行问题的排查定位查询日志是必要的依据。因此公司都会有自己的日志管理系统,采用ELK生态进行自建或者采购云服务。
最早在项目中使用的是log4j,近几年接触到的项目都使用的是Logback,这也是SpringBoot默认支持的日志框架。因此本篇文章中的代码也是在该日志框架的基础上进行的编写。
在这篇文章中不会进行Logback各个配置的介绍。在本文中会介绍下搭建项目时如何将公共的代码从业务系统中进行剥离。本文主要包含如下内容:
- 记录http请求日志
- 获取ip地址
- 打印POST请求体
- 处理日志中的敏感信息
- 各个服务之间的日志串联
- OpenFeign服务间的调用
- 消息队列消费者
- 定时任务
在文章开始先给出一份完整的logback-common.xml文件,内容如下:
<?xml version="1.0" encoding="UTF-8"?>
<!-- Shared logback fragment included by every service's logback-spring.xml. -->
<included>
<!-- Publishes the APP_NAME context property before the rest of the config is parsed. -->
<contextListener class="cn.yhzb.common.log.LogbackStartupListener"/>
<!-- Console pattern: timestamp, level, logger, message, with Spring-Boot-style colouring. -->
<property name="log-pattern"
value="%clr([%d{yyyy-MM-dd HH:mm:ss.SSS}]){faint} %clr(%5p) %clr(---){faint} %clr(%-80.80logger{79}){cyan} %clr(:){faint} %m%n%wEx"/>
<!-- Colour and whitespace-throwable converters borrowed from Spring Boot's defaults. -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
<conversionRule conversionWord="wex"
converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
<conversionRule conversionWord="wEx"
converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
<!-- NOTE(review): <layout> inside an appender is the legacy form; logback recommends
     an <encoder> with PatternLayoutEncoder — confirm before changing. -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<layout class="ch.qos.logback.classic.PatternLayout">
<pattern>${log-pattern}</pattern>
</layout>
</appender>
<!-- Main application log: JSON for Logstash (with phone-number masking via
     YhzbLogstashEncoder), rolled daily. -->
<appender name="logfile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<append>true</append>
<file>/opt/app/logs/logstash.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>/opt/app/logs/logstash.%d{yyyyMMdd}.log</fileNamePattern>
</rollingPolicy>
<encoder class="cn.yhzb.common.log.YhzbLogstashEncoder">
<includeMdcKeyName>traceId</includeMdcKeyName>
</encoder>
</appender>
<!-- Async wrapper for the main log; discardingThreshold 0 = never drop events. -->
<appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
<discardingThreshold>0</discardingThreshold>
<queueSize>256</queueSize>
<appender-ref ref="logfile"/>
</appender>
<!-- HTTP access log: one JSON line per request, carrying the MDC keys written by
     RequestLogInterceptor. -->
<appender name="requestlogfile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<append>true</append>
<file>/opt/app/logs/requestlog.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>/opt/app/logs/requestlog.%d{yyyyMMdd}.log</fileNamePattern>
</rollingPolicy>
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<includeMdcKeyName>ip</includeMdcKeyName>
<includeMdcKeyName>uri</includeMdcKeyName>
<includeMdcKeyName>method</includeMdcKeyName>
<includeMdcKeyName>param</includeMdcKeyName>
<includeMdcKeyName>postParam</includeMdcKeyName>
<includeMdcKeyName>code</includeMdcKeyName>
<includeMdcKeyName>cost</includeMdcKeyName>
<includeMdcKeyName>traceId</includeMdcKeyName>
</encoder>
</appender>
<!-- Async wrapper for the HTTP request access log. -->
<appender name="REQASYNC" class="ch.qos.logback.classic.AsyncAppender">
<discardingThreshold>0</discardingThreshold>
<queueSize>256</queueSize>
<appender-ref ref="requestlogfile"/>
</appender>
<!-- root loggers per environment -->
<!-- dev: log to both console and file. -->
<springProfile name="dev">
<root level="INFO">
<appender-ref ref="STDOUT"/>
<appender-ref ref="ASYNC"/>
</root>
<!-- HTTP request interceptor logger; additivity=false keeps request lines out of root. -->
<logger name="cn.yhzb.common.handler.RequestLogInterceptor" additivity="false">
<level value="INFO"/>
<appender-ref ref="STDOUT"/>
<appender-ref ref="REQASYNC"/>
</logger>
</springProfile>
<!-- test/release: INFO and above, console + file. -->
<springProfile name="test,release">
<root level="INFO">
<appender-ref ref="STDOUT"/>
<appender-ref ref="ASYNC"/>
</root>
<!-- HTTP request interceptor logger. -->
<logger name="cn.yhzb.common.handler.RequestLogInterceptor" additivity="false">
<level value="INFO"/>
<appender-ref ref="STDOUT"/>
<appender-ref ref="REQASYNC"/>
</logger>
</springProfile>
<!-- prod: file only, no console output. -->
<springProfile name="prod">
<root level="INFO">
<appender-ref ref="ASYNC"/>
</root>
<!-- HTTP request interceptor logger. -->
<logger name="cn.yhzb.common.handler.RequestLogInterceptor" additivity="false">
<level value="INFO"/>
<appender-ref ref="REQASYNC"/>
</logger>
</springProfile>
</included>
接口请求日志
编写拦截器RequestLogInterceptor
该拦截器用于拦截过来的请求,获取响应的信息,根据各自需求记录响应的数据。示例中会记录如下信息:
- ip 客户端ip地址
- uri 请求地址
- method 请求方法
- param RequestParam数据
- postParam body体请求参数
- code 响应码
- cost 耗时
- traceId traceId用于串联各个服务间的调用日志
@Slf4j
@Component
public class RequestLogInterceptor implements AsyncHandlerInterceptor {

    /** Per-thread request start time; consumed in afterCompletion to compute cost. */
    private final ThreadLocal<Long> startTimeThreadLocal = new ThreadLocal<>();

    /**
     * URIs whose POST body must not be recorded in the MDC (e.g. large or sensitive payloads).
     * NOTE(review): @Value is only injected when the Spring-managed bean is registered;
     * registering via {@code new RequestLogInterceptor()} leaves this null.
     */
    @Value("${request.log.postParam.ignore.urlList:}")
    private List<String> ignorePostParamUrlList;

    /**
     * Records request metadata (ip, uri, params, method, traceId, optional POST body)
     * into the MDC and logs the access line. Never blocks the request: all failures
     * are caught and logged.
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
        try {
            long currentTimeMillis = System.currentTimeMillis();
            startTimeThreadLocal.set(currentTimeMillis);
            String requestURI = request.getRequestURI();
            String ipAddr = IpUtil.getIpAddr(request);
            String param = JSON.toJSONString(request.getParameterMap());
            String method = request.getMethod();
            // traceId precedence: incoming header > existing MDC value > freshly generated.
            String traceId = request.getHeader(YhzbConstant.TRACE_ID);
            if (StringUtils.isBlank(traceId)) {
                traceId = MDC.get(YhzbConstant.TRACE_ID);
                if (StringUtils.isBlank(traceId)) {
                    traceId = UUIDUtil.createUUID();
                }
            }
            // Body is only re-readable when LogRequestFilter wrapped the request.
            if (request instanceof YhzbHttpServletRequestWrapper) {
                YhzbHttpServletRequestWrapper requestWrapper = (YhzbHttpServletRequestWrapper) request;
                String postParam = requestWrapper.getBody();
                if (CollectionUtils.isEmpty(ignorePostParamUrlList) || !ignorePostParamUrlList.contains(requestURI)) {
                    if (StringUtils.isNotBlank(postParam)) {
                        MDC.put("postParam", postParam);
                    }
                }
            }
            MDC.put("uri", requestURI);
            MDC.put("ip", ipAddr);
            MDC.put("param", param);
            MDC.put("method", method);
            MDC.put(YhzbConstant.TRACE_ID, traceId);
            String userAgent = request.getHeader("user-agent");
            String token = request.getHeader("token");
            log.info("Request access info: uri:【{}】ip:【{}】param:【{}】method:【{}】userAgent:【{}】token:【{}】", requestURI, ipAddr, param, method, userAgent, token);
        } catch (Exception e) {
            // Logging must never break request handling.
            log.error("request.log ", e);
        }
        return true;
    }

    /**
     * Logs the response line (status code, elapsed time) and always clears the
     * per-request thread state.
     */
    @Override
    public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
        try {
            String requestURI = request.getRequestURI();
            String ipAddr = IpUtil.getIpAddr(request);
            String param = JSON.toJSONString(request.getParameterMap());
            String method = request.getMethod();
            int responseCode = response.getStatus();
            Long startTime = startTimeThreadLocal.get();
            Long cost = startTime == null ? 0L : System.currentTimeMillis() - startTime;
            MDC.put("code", responseCode + "");
            MDC.put("cost", cost + "");
            log.info("Request response info: uri:【{}】ip:【{}】param:【{}】method:【{}】response code:【{}】cost:【{}ms】", requestURI, ipAddr, param, method, responseCode, cost);
        } catch (Exception e) {
            log.error("request.log ", e);
        } finally {
            // Fix: cleanup moved to finally — the original cleared inside try, so an
            // exception before MDC.clear() leaked the start time and MDC entries into
            // the next request served by this pooled thread.
            startTimeThreadLocal.remove();
            MDC.clear();
        }
    }
}
获取ip地址
Nginx配置中增加如下配置
proxy_set_header Host $host;
proxy_set_header X-Real-Ip $remote_addr;
proxy_set_header X-Forwarded-For $remote_addr;
编写IpUtil工具类,提供通过HttpServletRequest获取ip地址的方法,代码如下:
@Slf4j
public class IpUtil {

    /** Utility class; not meant to be instantiated. */
    private IpUtil() {
    }

    /**
     * Resolves the client IP of a request, honouring the proxy headers set by the
     * Nginx configuration (x-forwarded-for, Proxy-Client-IP, WL-Proxy-Client-IP)
     * before falling back to the socket address.
     *
     * @param request HttpServletRequest
     * @return client IP address, or "0.0.0.0" when resolution fails
     */
    public static String getIpAddr(HttpServletRequest request) {
        String ipAddr;
        try {
            ipAddr = request.getHeader("x-forwarded-for");
            if (StringUtils.isBlank(ipAddr) || "unknown".equalsIgnoreCase(ipAddr)) {
                ipAddr = request.getHeader("Proxy-Client-IP");
            }
            if (StringUtils.isBlank(ipAddr) || "unknown".equalsIgnoreCase(ipAddr)) {
                ipAddr = request.getHeader("WL-Proxy-Client-IP");
            }
            if (StringUtils.isBlank(ipAddr) || "unknown".equalsIgnoreCase(ipAddr)) {
                ipAddr = request.getRemoteAddr();
            }
            // Normalise the IPv6 loopback to the familiar IPv4 form.
            if (Objects.equals(ipAddr, "0:0:0:0:0:0:0:1")) {
                ipAddr = "127.0.0.1";
            }
            // x-forwarded-for may carry a chain "client, proxy1, proxy2"; keep the first
            // entry. Fix: the original only split when length() > 15, which missed short
            // multi-hop values such as "1.1.1.1,2.2.2.2" (exactly 15 characters).
            if (ipAddr != null && ipAddr.indexOf(',') > 0) {
                ipAddr = ipAddr.substring(0, ipAddr.indexOf(',')).trim();
            }
        } catch (Exception e) {
            log.error("获取请求ip地址发生异常: ", e);
            ipAddr = "0.0.0.0";
        }
        return ipAddr;
    }
}
@Configuration
@Order(-1)
public class RequestLogConfig implements WebMvcConfigurer {

    /**
     * Fix: inject the Spring-managed interceptor instead of calling
     * {@code new RequestLogInterceptor()} — the original bypassed dependency
     * injection, so the interceptor's @Value property
     * (request.log.postParam.ignore.urlList) was never populated.
     */
    private final RequestLogInterceptor requestLogInterceptor;

    public RequestLogConfig(RequestLogInterceptor requestLogInterceptor) {
        this.requestLogInterceptor = requestLogInterceptor;
    }

    /** Registers the request-logging interceptor for all MVC handler mappings. */
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(requestLogInterceptor);
    }
}
获取请求体
由于输入流只能读取一次,如果直接从Request中获取请求体,那么到后续接口上就获取不到内容了。解决这个问题需要我们自定义HttpServletRequestWrapper,代码如下:
public class YhzbHttpServletRequestWrapper extends HttpServletRequestWrapper {

    /** Cached request body so it can be re-read any number of times. */
    private final byte[] body;

    /**
     * Drains the request body into memory so downstream readers (the logging
     * interceptor and the controller) can each read it independently.
     *
     * @param request the original request whose body is consumed here
     * @throws IOException if reading the body fails
     */
    public YhzbHttpServletRequestWrapper(HttpServletRequest request) throws IOException {
        super(request);
        BufferedReader reader = request.getReader();
        try (StringWriter writer = new StringWriter()) {
            char[] buf = new char[8192];
            int read;
            while ((read = reader.read(buf)) != -1) {
                writer.write(buf, 0, read);
            }
            // Fix: encode with an explicit charset — the original used the platform
            // default here but decoded with UTF-8 in getBody(), corrupting non-ASCII
            // bodies on non-UTF-8 platforms.
            this.body = writer.getBuffer().toString().getBytes(StandardCharsets.UTF_8);
        }
    }

    /** Returns the cached body decoded as UTF-8. */
    public String getBody() {
        return new String(this.body, StandardCharsets.UTF_8);
    }

    /** Returns a fresh stream over the cached body on every call. */
    @Override
    public ServletInputStream getInputStream() throws IOException {
        ByteArrayInputStream inputStream = new ByteArrayInputStream(this.body);
        return new ServletInputStream() {
            @Override
            public boolean isFinished() {
                // Fix: report true once the cached body is fully consumed — the
                // original always returned false, which can hang non-blocking readers.
                return inputStream.available() == 0;
            }

            @Override
            public boolean isReady() {
                // Fix: data is already in memory, so a read never blocks — the
                // original always returned false.
                return true;
            }

            @Override
            public void setReadListener(ReadListener readListener) {
                // Non-blocking IO notifications are not supported for the cached body.
            }

            @Override
            public int read() throws IOException {
                return inputStream.read();
            }
        };
    }

    /** Returns a reader over the cached body, decoding as UTF-8 to match getBody(). */
    @Override
    public BufferedReader getReader() throws IOException {
        return new BufferedReader(new InputStreamReader(this.getInputStream(), StandardCharsets.UTF_8));
    }
}
自定义过滤器,拦截request请求,将请求对象转换为上一步自定义的HttpServletRequestWrapper对象,代码如下:
@Component
public class LogRequestFilter implements Filter {

    /**
     * Wraps POST requests (except multipart uploads) in a
     * {@link YhzbHttpServletRequestWrapper} so the body can be re-read downstream
     * by the logging interceptor and the controller.
     */
    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
        ServletRequest requestWrapper = null;
        if (servletRequest instanceof HttpServletRequest) {
            HttpServletRequest request = (HttpServletRequest) servletRequest;
            // NOTE(review): only POST bodies are cached, so PUT/PATCH bodies are never
            // logged by RequestLogInterceptor — confirm this is intended.
            if (request.getMethod().equals("POST")) {
                String contentType = request.getContentType();
                // Skip multipart uploads: file contents should not be buffered/logged.
                // Fix: compare case-insensitively — Content-Type values are
                // case-insensitive per RFC 9110, so "Multipart/form-data" must match too.
                if (StringUtils.isBlank(contentType) || !contentType.regionMatches(true, 0, "multipart/", 0, 10)) {
                    requestWrapper = new YhzbHttpServletRequestWrapper(request);
                }
            }
        }
        if (requestWrapper == null) {
            filterChain.doFilter(servletRequest, servletResponse);
        } else {
            filterChain.doFilter(requestWrapper, servletResponse);
        }
    }
}
处理打印日志中的敏感信息
项目中的一些敏感信息(手机号、身份证号等信息)不能直接打印到日志文件中,需要经过一些处理。在这里以手机号为例,将手机号中间几位替换为*。
编写自定义MessageJsonProvider类,代码如下:
public class LogEncryptJsonProvider extends MessageJsonProvider {
private static final String PHONE_REGEX = "(\d{1,4}[- ]\d{6,16})|((1[3-9])\d{9})";
private static final Pattern PHONE_PATTERN = Pattern.compile(PHONE_REGEX);
private static final int MASK_SIZE_FROM = 4;
private static final int MASK_SIZE_TO = 7;
@Override
public void writeTo(JsonGenerator generator, ILoggingEvent event) throws IOException {
JsonWritingUtils.writeStringField(generator, this.getFieldName(), filterMobile(event.getFormattedMessage()));
}
private static String filterMobile(String num) {
if (StringUtils.isBlank(num)) {
return num;
}
Matcher matcher = PHONE_PATTERN.matcher(num);
StringBuffer sb = new StringBuffer();
while (matcher.find()) {
String hidePone = maskPhone(matcher.group());
matcher.appendReplacement(sb, hidePone);
}
matcher.appendTail(sb);
return sb.toString();
}
private static String maskPhone(String phone) {
if (phone != null && phone.length() > MASK_SIZE_TO) {
char[] chars = phone.toCharArray();
for (int i = MASK_SIZE_FROM; i <= MASK_SIZE_TO; i++) {
chars[i] = '*';
}
return String.valueOf(chars);
}
return phone;
}
}
编写自定义LogstashEncoder类,使用上面的MessageJsonProvider类
public class YhzbLogstashEncoder extends LogstashEncoder {

    /**
     * Builds the standard Logstash formatter, then replaces every plain
     * MessageJsonProvider with the masking LogEncryptJsonProvider so sensitive
     * values (e.g. phone numbers) are scrubbed from the "message" field.
     */
    @Override
    protected CompositeJsonFormatter<ILoggingEvent> createFormatter() {
        LogstashFormatter formatter = new LogstashFormatter(this);
        List<JsonProvider<ILoggingEvent>> messageProviders = formatter.getProviders().getProviders().stream()
                .filter(MessageJsonProvider.class::isInstance)
                .collect(Collectors.toList());
        // forEach on an empty list is a no-op, so no emptiness guard is needed.
        messageProviders.forEach(messageProvider -> formatter.getProviders().removeProvider(messageProvider));
        formatter.addProvider(new LogEncryptJsonProvider());
        return formatter;
    }
}
微服务架构间的日志串联
在微服务环境下,我们需要一个traceId的数据将各个服务之间的日志进行关联。这样我们查询日志时通过该字段便可将各个服务间的日志串联起来,更方便我们进行问题排查。
主要考虑4种情况下的traceId传递,如下:
- http接口层
- 使用OpenFeign的服务间调用
- 消息队列消费者
- 定时任务
http接口层
这块的代码在上面的接口请求日志部分已经粘贴了代码,逻辑如下:
- 针对拦截的请求首先从请求头中获取
- 请求头中不存在就生成一个
OpenFeign的调用
在发起http请求时,需要携带请求头,将traceId传递过去。
编写FeignRequestInterceptor和FeignConfig类,代码如下:
@Slf4j
public class FeignRequestInterceptor implements RequestInterceptor {

    /** Name of the current service, attached to outgoing requests for tracing. */
    @Value("${spring.application.name:}")
    private String applicationName;

    /**
     * Propagates the calling application's name, the traceId and the user token as
     * headers on every outgoing Feign request so downstream services can correlate
     * their logs with this one.
     */
    @Override
    public void apply(RequestTemplate requestTemplate) {
        requestTemplate.header(RiseConstant.FROM_APPLICATION_NAME, applicationName);

        // Reuse the MDC traceId when present; otherwise mint a fresh one.
        String currentTraceId = MDC.get(RiseConstant.TRACE_ID);
        String traceId = StringUtils.isBlank(currentTraceId) ? UUIDUtil.createUUID() : currentTraceId;
        requestTemplate.header(RiseConstant.TRACE_ID, traceId);

        // The token header is optional and only forwarded when one exists.
        String token = MDC.get(RiseConstant.TOKEN);
        if (token != null) {
            requestTemplate.header(RiseConstant.TOKEN, token);
        }
    }
}
@Configuration
@ConditionalOnClass(value = RequestInterceptor.class)
public class FeignConfig {

    /**
     * Registers the trace-propagating interceptor for all Feign clients; only
     * active when Feign's RequestInterceptor is on the classpath.
     */
    @Bean
    public RequestInterceptor requestInterceptor() {
        FeignRequestInterceptor traceInterceptor = new FeignRequestInterceptor();
        return traceInterceptor;
    }
}
消息队列消费者
由于我项目中一直使用的是RocketMQ,所以示例中的代码针对的是该消息队列的实现。
为了不用在每个消费者中编写这些相同的代码,我们需要抽出一个工具类。编写MqUtil,代码如下:
@Slf4j
public class MqUtil {
public static <T> ConsumeResult handleMessage(MessageView messageView, String consumerClassName, TypeReference<T> messageType, DealWithParam<T> deal) {
String traceId = MDC.get(RiseConstant.TRACE_ID);
if (StringUtils.isBlank(traceId)) {
MDC.put(RiseConstant.TRACE_ID, UUIDUtil.createUUID());
}
if (messageView == null || messageView.getBody() == null) {
log.warn("consumer {} message or message body is null", consumerClassName);
return ConsumeResult.SUCCESS;
}
long start = System.currentTimeMillis();
ByteBuffer bodyBuffer = messageView.getBody();
String message = StandardCharsets.UTF_8.decode(bodyBuffer).toString();
log.info("Start consumer {} topic:{} messageId:{} messageBody:{}", consumerClassName, messageView.getTopic(), messageView.getMessageId(), message);
T messageDTO;
try {
messageDTO = JSON.parseObject(message, messageType);
} catch (Exception e) {
log.error("{} cannot parse {}. {}", messageView.getTopic(), messageType.getType().getTypeName(), message);
return ConsumeResult.SUCCESS;
}
try {
deal.deal(messageDTO);
log.info("End consumer {} topic:{} messageId:{} messageBody:{} cost:{}ms", consumerClassName, messageView.getTopic(), messageView.getMessageId(), message, System.currentTimeMillis() - start);
return ConsumeResult.SUCCESS;
} catch (Exception e) {
log.error("consumer:{} fail topic:{} messageId:{} messageBody:{}: ", consumerClassName, messageView.getTopic(), messageView.getMessageId(), message, e);
return ConsumeResult.FAILURE;
}
}
}
定时任务
这里以XXL-Job为例,同样为了减少相同代码的编写,我们这里定义一个JobHandlerUtil类,代码如下:
@Slf4j
public class JobHandlerUtil {

    /**
     * Shared wrapper for XXL-Job handlers: guarantees a traceId in the MDC, logs
     * start/success with timing, and logs (without rethrowing) any failure.
     *
     * @param handlerName job name used in log statements
     * @param deal        business callback invoked with the raw job parameter
     */
    public static void execute(String handlerName, DealWithParam<String> deal) {
        // Mint a traceId only when none is already present in the MDC.
        if (StringUtils.isBlank(MDC.get("traceId"))) {
            MDC.put("traceId", UUIDUtil.createUUID());
        }
        long begin = System.currentTimeMillis();
        String jobParam = XxlJobHelper.getJobParam();
        log.info("开始执行定时任务:【{}】,请求参数为:【{}】", handlerName, jobParam);
        try {
            deal.deal(jobParam);
            log.info("执行定时任务:【{}】成功,耗时:【{}ms】", handlerName, System.currentTimeMillis() - begin);
        } catch (Exception e) {
            // Failures are logged but deliberately not propagated to the scheduler.
            log.error("执行定时任务:【{}】失败: ", handlerName, e);
        }
    }
}
记录服务名称
除了使用traceId进行日志间的串联外,在微服务架构中,我们还需记录下日志所属的服务名称。
编写LogConfig和LogbackStartupListener类,代码如下:
@Slf4j
public class LogConfig {

    // Service name resolved once at class load from the APP_NAME environment
    // variable; falls back to "aaa" when the variable is unset.
    @Getter
    private static String appName = "";

    static {
        try {
            // Idiom fix: single lookup via getOrDefault instead of the original
            // get-then-ternary double lookup. Behavior is identical.
            appName = System.getenv().getOrDefault("APP_NAME", "aaa");
        } catch (Exception e) {
            log.error("获取环境变量失败,logback日志文件拼接会缺少数据:", e);
        }
    }
}
public class LogbackStartupListener extends ContextAwareBase implements LoggerContextListener, LifeCycle {
private boolean started = false;
public void start() {
if (!this.started) {
Context context = this.getContext();
context.putProperty("APP_NAME", LogConfig.getAppName());
this.started = true;
}
}
public void onReset(LoggerContext context) {
}
public void onStart(LoggerContext context) {
}
public void onStop(LoggerContext context) {
}
public void onLevelChange(Logger logger, Level level) {
}
public boolean isResetResistant() {
return true;
}
public void stop() {
}
public boolean isStarted() {
return this.started;
}
}
项目使用Docker部署的话,只需在Dockerfile中增加一行配置即可
ENV APP_NAME XXXXXXX
到这里代码便编写完成了,目录结构如下:
项目中使用
在项目中使用也非常方便,将上面的文件目录添加到SpringBoot的扫描路径,同时在各项目的classpath下创建logback-spring.xml和logback-app.xml文件即可,文件内容如下:
<!-- logback-spring.xml -->
<?xml version="1.0" encoding="UTF-8" ?>
<!-- Entry point loaded by Spring Boot: pulls in the shared common fragment and the
     per-service overrides. scan="false" disables hot reload of this file. -->
<configuration scan="false" debug="false">
<include resource="cn/yhzb/common/log/logback-common.xml"/>
<include resource="logback-app.xml"/>
</configuration>
<!-- logback-app.xml -->
<?xml version="1.0" encoding="UTF-8"?>
<included>
    <!-- Non-production profiles: console + async file output.
         Fix: "prod" removed from this profile list — it previously overlapped with the
         dedicated prod block below, so in prod both blocks applied and the root logger
         accumulated STDOUT plus a duplicate ASYNC reference (duplicated log lines and
         unwanted console output). -->
    <springProfile name="dev,test,release">
        <root level="INFO">
            <appender-ref ref="STDOUT"/>
            <appender-ref ref="ASYNC"/>
        </root>
        <!-- Fix: "addtivity" typo corrected to "additivity" — with the misspelled
             attribute, additivity defaulted to true and request-access logs were
             duplicated into the root appenders. -->
        <logger name="cn.yhzb.common.handler.RequestLogInterceptor" additivity="false">
            <level value="INFO"/>
            <appender-ref ref="STDOUT"/>
            <appender-ref ref="REQASYNC"/>
        </logger>
        <logger name="cn.yhzb.domain.mapper" additivity="false">
            <level value="DEBUG"/>
            <appender-ref ref="STDOUT"/>
            <appender-ref ref="ASYNC"/>
        </logger>
        <logger name="cn.yhzb.common" additivity="false">
            <level value="INFO"/>
            <appender-ref ref="STDOUT"/>
            <appender-ref ref="ASYNC"/>
        </logger>
        <logger name="cn.yhzb" additivity="false">
            <level value="INFO"/>
            <appender-ref ref="ASYNC"/>
        </logger>
    </springProfile>
    <!-- Production: async file output only, no console. -->
    <springProfile name="prod">
        <root level="INFO">
            <appender-ref ref="ASYNC"/>
        </root>
        <!-- Fix: same "addtivity" typo corrected here. -->
        <logger name="cn.yhzb.common.handler.RequestLogInterceptor" additivity="false">
            <level value="INFO"/>
            <appender-ref ref="REQASYNC"/>
        </logger>
        <logger name="cn.yhzb" additivity="false">
            <level value="INFO"/>
            <appender-ref ref="ASYNC"/>
        </logger>
        <logger name="cn.yhzb.common" additivity="false">
            <level value="INFO"/>
            <appender-ref ref="ASYNC"/>
        </logger>
    </springProfile>
</included>
这样我们就完成了相关代码的编写,在配置文件中我们指定了将日志文件存储到/opt/app/logs/目录下,可以自行进入该目录查看。
如果本文对您有些许的帮助,请大方的关注作者吧。
欢迎大家关注我的公众号为我加油打气:【Bug搬运小能手】。文章会优先在公众号发布。