This post covers: hourly request counts, hourly counts grouped by status code, per-minute counts of requests whose processing time exceeds 7 seconds, per-hour stage timings and average timings, counting occurrences of JSON field values and assembling SQL statements from them, and statistics for the 00:00-12:00 window.
## Common commands
# Search every file under the current directory for a string and print only the matching file names
find . | xargs grep -ri 'string' -l
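# If file names may contain spaces, a null-delimited variant (or plain recursive grep) is safer:
find . -type f -print0 | xargs -0 grep -ril 'string'
grep -rli 'string' .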
# Replace a string in a file in place
sed -i 's/oldStr/newStr/g' demo
# Save the worker-5 exception plus 20 lines of context before and after it to a file
cat all.log.2015-08-26 | grep 'schedulerFactoryBean_Worker-5] - 异常信息' -A 20 -B 20 > worker-5-exception.log
# Extract the lines within a time range from a file
sed -n '/2014-05-16/,/2014-05-17/p' catalina.out > 20140516tomcat.log
grep "2014-07-23 13:[00-59]" 20140514tomcat.log >05-14.log
# Extended-regex matching
egrep '(-开始)|(-结束)' p.log > begin-end.log
# This time yesterday
date -d"yesterday" +"%F %H:%M:%S"
# Create a symbolic link
ln -s target_file link_name
# Check CPU performance
vmstat 1
# Show per-CPU statistics (ALL reports every processor)
mpstat -P ALL 1
# Check I/O performance
iostat -m -x 1
# Print the 9x9 multiplication table
seq 9 | sed 'H;g' | awk -v RS='' '{for(i=1;i<=NF;i++)printf("%dx%d=%d%s", i, NR, i*NR, i==NR?"\n":"\t")}'
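# How it works: sed 'H;g' turns the numbers 1..9 into blank-line-separated blocks where block N
# contains 1..N; with RS='' awk reads each block as one record (NF == N) and prints row N of the table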
# Count TCP connections by state
netstat -an|awk '/tcp/ {print $6}'|sort|uniq -c
netstat -n | awk '/^tcp/ {++S[$NF]} END {for(a in S) print a, S[a]}'
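# In both commands $6 / $NF is the State column of the netstat output (e.g. ESTABLISHED, TIME_WAIT, LISTEN)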
## awk log analysis examples
### Hourly request counts
awk '{print $5}' /data/log/nginxlog/nginx.log|awk -F: '{s[$1":"$2]++}END{for(a in s){print a,s[a]}}'|sort
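# Assumption: in this log the timestamp is field 5 (e.g. [02/Feb/2021:13:05:22), so splitting on ':'
# leaves the date in $1 and the hour in $2; with the default combined nginx format the timestamp is
# field 4, so adjust the field number to your own log layout.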
### Hourly counts grouped by status code
awk '{print $5":"$10}' /data/log/nginxlog/nginx.log|awk -F: '{s[$1":"$2":"$5]++}END{for(a in s){print a,s[a]}}'|sort
### Per-minute count of requests taking longer than 7 seconds
grep '2021-02-02 00' /data/log/service/service.2021-02-02.log |grep 'ServiceImpl'| awk '{print $1,$2":"strtonum($12)}' |awk -F: '{if($4>7000){s[$1":"$2]++}}END{for(a in s){print a,s[a]}}'|sort
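# Assumption about the service log layout: $2 is the HH:MM:SS,ms timestamp and $12 is the processing
# time in milliseconds; after re-splitting on ':' the minute-level key is $1":"$2 and the duration is $4,
# so adjust the field numbers if your log format differs.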
### Per-hour stage timings and averages [hour, gds_ms, jaxb_ms, parser_ms, spend_ms, count, size, spend_ms/1000]
grep 'INFO c.f.s.s.api.service.impl.ServiceImpl ' /data/log/service/service.2019-04-24.log \
| awk '{split($2,time,":");split($9,gds,":");split($10,jaxb,":");split($11,parse,":");split($13,count,":");{print $1 "-" time[1] " " strtonum(gds[2]) " " strtonum(jaxb[2]) " " strtonum(parse[2]) " " strtonum(count[2])}}' \
| awk '{gds[$1]=gds[$1]+$2;jaxb[$1]=jaxb[$1]+$3;parser[$1]=parser[$1]+$4;count[$1]=count[$1]+$5;size[$1]=size[$1]+1};END{for(i in gds)print i,gds[i],jaxb[i],parser[i],(gds[i]+jaxb[i]+parser[i]),count[i],size[i],(gds[i]+jaxb[i]+parser[i])/1000 | "sort"}'
### Per-hour stage timings and averages, with an extra filter condition
grep 'INFO c.f.s.s.api.service.impl.ServiceImpl ' /data/log/service/service.2019-04-24.log \
| awk '{split($2,time,":");split($9,gds,":");split($10,jaxb,":");split($11,parse,":");split($13,count,":");{if(strtonum(gds[2])>7000){print $1 "-" time[1] "#" $7 " " strtonum(gds[2]) " " strtonum(jaxb[2]) " " strtonum(parse[2]) " " strtonum(count[2])}}}' \
| awk '{gds[$1]+=$2;jaxb[$1]+=$3;parser[$1]+=$4;count[$1]+=$5;size[$1]+=1};END{for(i in gds)print (gds[i]+jaxb[i]+parser[i]),(gds[i]+jaxb[i]+parser[i])/size[i],size[i],i | "sort -nr"}' \
| head -n 100 >> stat
### Route query counts for a single hour
grep 'INFO' /data/log/service/service.2019-11-11.log|awk -F: '{split($6,fromto,"|");{print $1,fromto[1]}}'|awk -F: '{sum[$1]+=1}END{for(c in sum){print c,sum[c]}}'|sort -k 4 -nr|head
### Hourly timeout counts
grep 'INFO' /data/log/service/service.2019-11-11.log|awk -F: '{split($5,detail,",");{print $1,strtonum(detail[1])}}'| awk '{if(strtonum($3)>7000){print $1,$2,"timeout"}else{print $1,$2,"ok"}}'|awk -F: '{sum[$1]+=1}END{for(c in sum){print c,sum[c]}}'|sort
### Count occurrences of JSON field values and build SQL statements
grep 'GrabClient-RESP-booking' /data/log/java_springcloud/light-booking-service/light-booking-service.2022-09-02.log \
| awk -F '[:,"}]' '{for(i=1;i<=NF;i++){if($i == "sessionId" || $i == "pnrCode" || $i == "pnrIpcc") print $(i+3)}}' \
| awk '{if(NR%3==0){printf $0 "\n"}else{printf "%s ",$0}}' \
| awk -F ' ' '{print $3,$1,$2}' \
| sort -k 1 -t " " \
| awk '{a[$1]=a[$1]?a[$1]FS$2:$2}END{for(i in a)print i,a[i]}' \
| awk '{if(NF > 2){print (NF-1),$0}}' \
| awk '{print "insert into jishubu.ticket_order (day,c,session_id,pnrs) values (\"2022-04-02\","$1",\""$2"\",\""$0"\");"}'
### Statistics for the 00:00-12:00 window
sed -n '/2019-04-24 00/,/2019-04-24 12/p' 2019-04-24.log | grep 'INFO ServiceImpl' \
| awk '{split($2,time,":");split($9,gds,":");split($13,count,":");{print $1 "-" time[1] " " strtonum(gds[2])}}' \
| awk '{gds[$1]=gds[$1]+$2;count[$1]=count[$1]+$5;size[$1]=size[$1]+1};END{for(i in gds)print i,gds[i],count[i],size[i] | "sort"}'