多台服务器日志查询
#!/bin/bash
# Multi-server log query: fetch the jump-host list matching $1, then run
# "$2 <webroot>/<app>/logs/$3 $4" over ssh on every matched host.
#   $1 - domain filter (extended regex) applied to the jump-host list
#   $2 - command to run remotely (e.g. tail, grep)
#   $3 - log file name under <app>/logs/
#   $4 - extra argument appended to the command
list=$(curl -s http://xxxxxxxxx/api/domain/jump_hosts | grep -E "$1" | awk '{print $2":"$1" "}');
# Bail out early when nothing matched (same guard as the jump script).
if [ ${#list} -eq 0 ]; then
    echo "no match hosts.";
    exit;
fi
echo -en "match list:\n$list\n";
read -p "按下回车继续...";
basePath="/home/tomcat/www/";
suffix="/logs/";
# $list items are "ip:domain" pairs; rely on word-splitting to iterate them.
for item in $list
do
    ip=${item%%:*};
    # Each host is assumed to deploy exactly one web app under $basePath,
    # so `ls` yields the single app directory name.
    webPath=$(ssh "$ip" ls "$basePath");
    comm="$2 $basePath$webPath$suffix$3 $4";
    echo -e "\n\n\nGooooooooTo: ${item#*:}";
    echo "command: $comm";
    echo "---------------------------------------------------------------------------";
    # $comm stays unquoted on purpose: ssh joins the words into the remote command line.
    ssh "$ip" $comm;
    echo -en "\n";
done
服务器跳转脚本
#!/bin/bash
# Interactive server jump script: list the hosts matching $1, let the user pick
# one (auto-pick when there is a single match), make sure the remote ~/.bashrc
# cd's into the app's log directory on login, then open an ssh session.
#   $1 - domain filter applied to the jump-host list
jump=$(curl -s http://xxxxxxxxxx/api/domain/jump_hosts | grep "$1" | sort -k 1 | awk '{print $2":"$1" "}');
if [ ${#jump} -eq 0 ] ;then
    echo "no match hosts.";
    exit;
fi

# Ensure the remote ~/.bashrc contains a "cd <app>/logs/" line (added once),
# print the destination, then start the interactive session.
#   $1 - target ip
#   $2 - display name (domain)
jump_to() {
    local realIp=$1;
    local isNeed;
    # "1" when the remote .bashrc has no "cd " line yet, "0" otherwise.
    isNeed=$(ssh "$realIp" "cat ~/.bashrc | grep \"cd \" | awk '{count++}END{if(count==0){print \"1\";}else{print \"0\";}}'");
    if [ "$isNeed" -eq 1 ] ;then
        # Append a cd into the (single) web app's log dir; redirection runs remotely.
        ssh "$realIp" "ls /home/tomcat/www/ | awk '{webName=\$0;}END{if(length(webName)>0){print \"\ncd /home/tomcat/www/\"webName\"/logs/\"}}' >> ~/.bashrc";
    fi
    echo -en "jump to $2\n";
    ssh "$realIp";
}

count=0;
echo "match list:";
# Build a 1-based array of "ip:domain" entries while printing the menu.
for item in $jump
do
    ip[++count]=$item;
    printf " %d. %-40s%s\n" $count ${item#*:} ${item%:*};
done
echo ;
if [ $count -eq 1 ] ;then
    # Single match: jump straight in. (Fixes the original bug where the unset
    # index "num" was used here, printing an empty destination name.)
    jump_to "${ip[1]%:*}" "${ip[1]#*:}";
else
    read -p "choice num:" num;
    jump_to "${ip[num]%:*}" "${ip[num]#*:}";
fi
日志自动清理脚本(日志需有滚动切割)
#!/bin/bash
# Automatic log cleanup (logs must already be rotated/split): for every host
# matching $1 that runs exactly one web app, delete rotated logs older than a
# sliding cutoff (starting 45 days back, shrinking by 5 each pass, never below
# 10 days) until the /home/tomcat partition has at least 30% space available.
#   $1 - domain filter applied to the clean-host list

# Load bashrc so ssh (keys/agent/env) works when run from cron.
source /home/kunjing/.bashrc;
jump=$(curl -s http://xxxxxxxxxxxxxx/api/domain/clean_hosts | grep "$1" | grep -v "es" | awk '{print $2":"$1" "}');
for item in $jump
do
    nowTime=$(date "+%Y-%m-%d %H:%M:%S");
    printf "[$nowTime] 开始清理: \e[1;34m%-40s\e[0m[%s]\n" ${item#*:} ${item%:*};
    domain=${item#*:};
    realIp=${item%:*};
    webPath=$(ssh $realIp "ls /home/tomcat/www");
    pathArr=($webPath);
    len=${#pathArr[@]};
    # Only handle hosts with exactly one web app; skip everything else.
    if [ $len -ne 1 ];then
        echo "$domain not in match rules,jump this.";
        continue;
    fi
    fullPath="/home/tomcat/www/$webPath/logs/";
    # Fraction of the /home/tomcat filesystem still available (avail/size).
    availablePercent=$(ssh $realIp df | grep /home/tomcat | awk '{precent=$4/$2}END{print precent}');
    intervalDays=45;
    echo -e "[${item#*:}] 可用容量占比:\e[1;34m $availablePercent \e[0m";
    while [ `echo "$availablePercent < 0.3"|bc` -eq 1 ];do
        # Never delete logs from the last 10 days.
        if [ $intervalDays -lt 10 ];then
            break;
        fi
        limitDate=$(date -d -"$intervalDays"day +%Y%m%d);
        echo "当前可用容量:[$availablePercent] ,需处理 [$fullPath] 下 [$limitDate] 之前的数据";
        count=0;
        # Reset the pending-delete batch each pass. (Fixes the original bug
        # where stale entries leaked across passes/hosts and were re-deleted.)
        dealList="";
        # "YYYYMMDD:filename" entries sorted by date, oldest first.
        list=$(ssh $realIp ls $fullPath --full-time |awk '{split($6,res,"-");if(length(res)==3)print res[1]res[2]res[3]":"$9}' | sort -n);
        # Loop variable renamed from "item" so it no longer clobbers the outer loop's.
        for entry in $list
        do
            fileDate=${entry%:*};
            fileName=${entry#*:};
            count=$((count + 1));
            if [ `echo "$fileDate < $limitDate "|bc` -eq 1 ]; then
                dealList+=" $fullPath"$fileName;
            else
                # List is date-sorted: the first file at/after the cutoff ends the scan.
                break;
            fi
            # Delete in batches of 10 to keep the remote command line short.
            if [ $((count % 10)) -eq 0 ];then
                commd="ssh $realIp sudo rm -f $dealList";
                echo "    deal command: $commd";
                $commd;
                dealList="";
            fi
        done
        # Flush the final partial batch. (Fixes two original bugs: the batch was
        # dropped when every listed file was older than the cutoff, and it was
        # never cleared after the in-loop "finaly deal" flush.)
        if [ ${#dealList} -gt 0 ]; then
            commd="ssh $realIp sudo rm -f $dealList";
            echo "    finaly deal command: $commd";
            $commd;
            dealList="";
        fi
        # Re-check free space; if still under 30%, shorten the cutoff and repeat.
        availablePercent=$(ssh $realIp df | grep /home/tomcat | awk '{precent=$4/$2}END{print precent}');
        ((intervalDays=$intervalDays-5));
    done
    echo -e "[$domain] 处理完毕,可用容量占比: \e[1;34m $availablePercent \e[0m ";
    echo "--------------------------------------------------------------------------------------------";
done
ERROR日志巡逻(企业微信机器人报警)
#!/bin/bash
# ERROR-log patrol: scan each monitored host's catalina.out for errors logged
# in the previous minute and report them to a WeChat Work (企业微信) webhook.
source /home/kunjing/.bashrc;
# Scan one host and push at most 5 matched errors (with context) to the webhook.
#   $1 - "ip:domain" pair
#   $2 - time fragment (HH:MM) expected within the first 10-50 chars of a log line
function error_scan(){
ip=${1%%:*};
domain=${1#*:};
# Per-domain filter pipeline fetched from the config service; it is spliced
# verbatim into the remote command below, so it must come from a trusted source.
rule=$(curl -s "http://xxxxxxxxxx/api/rules/get?domain=$domain");
# Match lines carrying the timestamp, keep 3 lines of trailing context, prefix
# every line with its number (grep '' -n), then apply the per-domain rule.
comm="cat /home/tomcat/www/*/logs/catalina.out | grep -E \"^.{10,50}$2\" -A 3 | grep '' -n | $rule";
# Keep only the line numbers (the field before the first ':').
error_log_lines=$(ssh ${ip} "$comm" | awk -F ':' '{printf $1"\n"}');
if [ ${#error_log_lines} -gt 0 ];then
line_num_grep_str="";
count=0;
# Build an alternation of "^<num>:" anchors for the matched line numbers.
for line in $error_log_lines
do
line_num_grep_str="${line_num_grep_str}|^${line}:";
if [ $((++count)) -ge 5 ];then # at most 5 ERRORs per report
break;
fi
done
# Strip the leading '|' left by the first concatenation above.
line_num_grep_str="${line_num_grep_str#*|}"
echo "$line_num_grep_str";
# Re-run the same numbered scan and pull each selected error plus 3 context
# lines, dropping INFO/WARN/DEBUG noise and the "--" group separators.
error_logs=$(ssh ${ip} "cat /home/tomcat/www/*/logs/catalina.out | grep -E \"^.{10,50}$2\" -A 3 | grep '' -n | grep -E \"$line_num_grep_str\" -A 3 | grep -E -v \"INFO|WARN|DEBUG|--\"");
# Strip ANSI color residue (the 2-byte ESC[ prefix before "1;31m"/"0;39m"),
# drop the "num:" prefix, cap each line at 512 chars, join with literal "\n".
error_logs=$(echo "$error_logs" | awk '{line=$0;gsub(/.{2}1;31m/,"",line);gsub(/.{2}0;39m/,"",line);ind=index(line,":")+1;line=substr(line,ind,512);printf line"\\n";}');
# Escape double quotes so the text can be embedded in the JSON payload.
msg=${error_logs//\"/\\\"};
# Length limit imposed by the webhook message size.
msg=${msg:0:2000}
wx_content="{\"msgtype\": \"text\",\"title\":\"错误日志\",\"text\": {\"content\": \"》$domain《\n——————————————————\n$msg\"}}";
echo "$wx_content";
curl 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=xxxxxxxxxxxxxxxxxxx' -H 'Content-Type: application/json' -d "$wx_content";
fi
}
# Optional watchdog (disabled): kill the whole process group after 30s.
#(sleep 30 && kill -9 $(ps -ef | grep $$ | awk '{printf " "$2}') )&

# "ip:domain" pairs for every monitored host matching the $1 filter.
list=$(curl -s http://xxxxxxxxxx/api/domain/monitor_hosts | grep "$1" | awk '{print $2":"$1" "}');
# Scan the previous minute's timestamp window.
last_min=$(date "+%H:%M" -d "-1 minute");
echo -en "\n[$last_min]开始排查ERROR日志--------------------------------------\n";
job_count=0;
for host_entry in $list
do
    job_count=$((job_count+1));
    # Fan each host scan out into the background.
    error_scan "$host_entry" "$last_min" &
    # Barrier every 10 jobs so no more than 10 ssh sessions run at once.
    if [ $((job_count%10)) -eq 0 ];then
        wait;
    fi
done
# Reap any jobs still running before printing the summary.
wait;
echo -en "\n[$last_min]ERROR日志排查完毕--------------------------------------\n\n";
网友评论