The configuration below is based on Elastic 8.11.
Filebeat configuration
filebeat.inputs:
- type: log   # note: in Filebeat 8.x the log input is deprecated in favour of filestream, but it still works as shown here
  enabled: true
  paths:
    - /share/project/logs/user-api.log
  # Add custom fields
  fields:
    serviceEnv: "prod"        # environment these logs belong to
    serviceName: "user-api"   # service these logs belong to
  multiline.pattern: '^\d{4}-\d{2}-\d{2}'   # marks the start of a log record (a timestamp here)
  multiline.negate: true                    # true: lines that do NOT match the pattern are continuations, not new events
  multiline.match: after                    # continuation lines are appended after the matching line, into the same event
- type: log
  enabled: true
  paths:
    - /share/project/logs/storage-api.log
  fields:
    serviceEnv: "test"
    serviceName: "storage-api"
  multiline.pattern: '^\d{4}-\d{2}-\d{2}'
  multiline.negate: true
  multiline.match: after
- type: log
  enabled: true
  paths:
    - /share/project/logs/auth-api.log
  fields:
    serviceEnv: "dev"
    serviceName: "auth-api"
  multiline.pattern: '^\d{4}-\d{2}-\d{2}'
  multiline.negate: true
  multiline.match: after

output.logstash:
  hosts: ["192.168.10.101:5044"]
Logstash configuration
input {
  beats {
    port => 5044
  }
}
filter {
  if [fields][serviceName] in ["user-api", "storage-api", "auth-api"] {
    grok {
      match => {
        # (?m) enables multiline matching across the merged event
        # logback pattern: %d{yyyy-MM-dd HH:mm:ss.SSS} [%X{traceId}] %-5.5level ${PID} --- [%15.15thread] %logger{20} %5.5line : %msg%n
        "message" => "(?m)%{TIMESTAMP_ISO8601:recordTime} \[%{DATA:traceId}\] %{LOGLEVEL:level} %{DATA:pid} --- \[%{DATA:thread}\] %{DATA:logger} %{NUMBER:line} : %{GREEDYDATA:message}"
      }
      # replace the raw message with the captured one instead of appending a second value to the field
      overwrite => ["message"]
    }
  } else {
    grok {
      match => {
        # (?m) enables multiline matching across the merged event
        # logback pattern: %d{yyyy-MM-dd HH:mm:ss.SSS} %-5.5level ${PID} --- [%15.15thread] %logger{20} %5.5line : %msg%n
        "message" => "(?m)%{TIMESTAMP_ISO8601:recordTime} %{LOGLEVEL:level} %{DATA:pid} --- \[%{DATA:thread}\] %{DATA:logger} %{NUMBER:line} : %{GREEDYDATA:message}"
      }
      overwrite => ["message"]
    }
  }
  # Use the date filter to parse the extracted timestamp into @timestamp
  date {
    match => ["recordTime", "yyyy-MM-dd HH:mm:ss.SSS"]
    target => "@timestamp"
    timezone => "Asia/Shanghai"
  }
  mutate {
    add_field => {
      "hostName" => "%{[host][name]}"
      # promote serviceName to a top-level field so the serviceName mapping in the template
      # is populated even after the original fields object is removed below
      "serviceName" => "%{[fields][serviceName]}"
      "[data_stream][type]" => "logs"
      "[data_stream][dataset]" => "qserver-%{[fields][serviceEnv]}"
      "[data_stream][namespace]" => "%{[fields][serviceName]}"
    }
    remove_field => ["host", "ecs", "event", "agent", "tags", "fields", "@version", "input", "log", "recordTime"]
    strip => ["level", "thread", "logger", "pid"]
  }
}
output {
  stdout {}
  elasticsearch {
    hosts => ["http://dawn100.dawn.com:9200", "http://dawn101.dawn.com:9200", "http://dawn102.dawn.com:9200"]
    data_stream => "true"
    data_stream_sync_fields => "false"
  }
}
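Because data stream names are composed as {type}-{dataset}-{namespace}, the events above land in streams such as logs-qserver-prod-user-api (prod environment, user-api service). Once Logstash has shipped some events, the streams and their backing indices can be checked from Kibana Dev Tools:

GET _data_stream/logs-qserver-*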
Create the index lifecycle policy
PUT _ilm/policy/logs-qserver-policy
{
  "policy": {
    "phases": {
      "hot": {
        "min_age": "0ms",
        "actions": {
          "rollover": {
            "max_size": "10GB",
            "max_age": "7d"
          },
          "set_priority": {
            "priority": 100
          }
        }
      },
      "warm": {
        "min_age": "7d",
        "actions": {
          "forcemerge": {
            "max_num_segments": 1
          },
          "shrink": {
            "number_of_shards": 1
          }
        }
      },
      "cold": {
        "min_age": "60d",
        "actions": {}
      },
      "delete": {
        "min_age": "120d",
        "actions": {
          "delete": {}
        }
      }
    }
  }
}
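Once the policy is attached through the index template below, its progress on a stream's backing indices can be inspected with the ILM explain API; the target here is the example data stream name from this setup:

GET logs-qserver-prod-user-api/_ilm/explain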
Create the index template
PUT /_index_template/logs-qserver-template
{
  "index_patterns": [
    "logs-qserver-*"
  ],
  "data_stream": {},
  "priority": 500,
  "template": {
    "settings": {
      "index.lifecycle.name": "logs-qserver-policy",
      "index.number_of_replicas": "1",
      "index.number_of_shards": "6",
      "analysis": {
        "analyzer": {
          "cld_analyzer": {
            "type": "custom",
            "char_filter": [
              "cld_filter"
            ],
            "tokenizer": "cld_tokenizer"
          }
        },
        "char_filter": {
          "cld_filter": {
            "pattern": """^.+\.""",
            "type": "pattern_replace",
            "replacement": ""
          }
        },
        "tokenizer": {
          "cld_tokenizer": {
            "type": "keyword"
          }
        }
      }
    },
    "mappings": {
      "properties": {
        "@timestamp": {
          "type": "date"
        },
        "hostName": {
          "type": "keyword"
        },
        "level": {
          "type": "keyword"
        },
        "line": {
          "type": "keyword"
        },
        "logger": {
          "type": "text"
        },
        "message": {
          "type": "text"
        },
        "pid": {
          "type": "keyword"
        },
        "serviceName": {
          "type": "keyword"
        },
        "thread": {
          "type": "keyword"
        },
        "traceId": {
          "type": "keyword"
        }
      }
    }
  }
}
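To confirm that a new backing index would pick up this template together with the lifecycle policy and mappings, the simulate-index API can be run against an example name from this setup before any data is indexed:

POST _index_template/_simulate_index/logs-qserver-prod-user-api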
Configure the @timestamp date format in Kibana
- Set the display format to: YYYY-MM-DD HH:mm:ss.SSS (Kibana date formatters use moment.js tokens, so the year is uppercase YYYY)