
Detecting and Blocking Abnormal IPs in Nginx

Servers frequently see bursts of abnormal traffic; the usual way to handle it is to identify the offending source IPs and ban them.
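Every script below boils down to the same pipeline: pull the client IP out of each access-log line, count occurrences, and flag the heavy hitters. As a minimal standalone sketch (the log path is an example):

# top 10 client IPs in the last 100,000 access-log lines
tail -n100000 /opt/nginx/logs/access.log | awk '{print $1}' | sort | uniq -c | sort -rn | head -n 10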

# deny_ip_o.sh (file contents)
#!/bin/bash
max=500
# threshold: when an IP's request count exceeds this value, it gets blocked
logdir=/opt/nginx/logs/access.log
# nginx access log path
confdir=/opt/nginx/logs/nginx_deny.log
# ban record file; created if it does not exist
test -e ${confdir} || touch ${confdir}
port=443
drop_Ip=""
# walk the log and pick out every IP with more than ${max} requests
for drop_Ip in $(tail -n500000 $logdir | awk '{print $1}' | sort | uniq -c | sort -rn | awk -v max=$max '{if ($1>max) print $2}')
do
  echo "${drop_Ip}"
  grep -q "${drop_Ip}" ${confdir} && eg=1 || eg=0
  if (( ${eg}==0 ));then
	iptables -I INPUT -p tcp --dport ${port} -s ${drop_Ip} -j DROP
	echo ">>>>> `date '+%Y-%m-%d %H%M%S'` - attack source found ->  ${drop_Ip} " >> ${confdir}  # record it
  fi
done
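Rules inserted this way exist only in the running iptables chain (they vanish on reboot unless saved). To audit or lift a ban later, something like the following should work; the address 1.2.3.4 is a placeholder:

iptables -L INPUT -n --line-numbers                          # list rules with their positions
iptables -D INPUT -p tcp --dport 443 -s 1.2.3.4 -j DROP      # delete by repeating the exact rule spec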

####################################################################
#!/bin/bash
#_ log directory
_log_Path="/data0/nginx/weblogs/"
#_ log file name
_log_FileName="access_blog.kinggoo.com.log"
#_ port to block, 80 by default
_port="80"
_nginx_deny="/opt/webserver/nginx/conf/deny.conf"
_nginx_bin="/opt/webserver/nginx/sbin/nginx"
_logfilepath=${_log_Path}${_log_FileName}
# initialize the banned-IP variable
_drop_Ip=""
# create the deny file if it does not exist
test -e ${_nginx_deny} || touch ${_nginx_deny}
for _drop_Ip in $(tail -n50000 "${_logfilepath}" | awk '{print $1,$12}' | grep -i -v -E "google|yahoo|baidu|msnbot|FeedSky|sogou|WordPress" | awk '{print $1}' | sort | uniq -c | sort -rn | awk '{if($1>1000)print $2}')
do
         grep  -q  "${_drop_Ip}" ${_nginx_deny} && eg=1 || eg=0 ;
         if (( ${eg}==0 ));then
                echo  "deny ${_drop_Ip};" >> ${_nginx_deny}
                ${_nginx_bin} -s  reload
                iptables -I INPUT -p tcp --dport ${_port} -s ${_drop_Ip} -j DROP
                echo ">>>>> `date '+%Y-%m-%d %H%M%S'` - attack source found ->  ${_drop_Ip} " >> /tmp/nginx_deny.log;
                echo "iptables -I INPUT -p tcp --dport ${_port} -s ${_drop_Ip} -j DROP" >> /tmp/nginx_deny.log
         fi
done
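This variant writes deny entries into a separate file and reloads nginx, which only takes effect if that file is pulled into the active configuration. Assuming the paths above, the nginx.conf fragment would look something like:

http {
    ...
    include /opt/webserver/nginx/conf/deny.conf;
}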


Blocking with nginx

...

- Blocking IPs

#!/bin/bash
max=500    # threshold: block any IP whose request count exceeds this value
confdir=/usr/local/data/nginx/conf/blockip.conf # nginx block-list config file
logdir=/usr/local/data/nginx/logs/access_huke88.log  # nginx access log path
# create the block-list file if it does not exist
test -e ${confdir} || touch ${confdir}
drop_ip=""
# walk the log and pick out every IP with more than ${max} requests
for drop_ip in $(awk '{print $1}' $logdir | sort | uniq -c | sort -rn | awk -v max=$max '{if ($1>max) print $2}')
do
  grep -q "${drop_ip}" ${confdir} && eg=1 || eg=0
  if (( ${eg}==0 ));then
     echo "deny ${drop_ip};">>$confdir  # append a "deny IP;" line to the block-list file
     echo ">>>>> `date '+%Y-%m-%d %H%M%S'` - attack source found ->  ${drop_ip} " >> /usr/local/data/nginx/logs/nginx_deny.log  # record it
  fi
done

service nginx reload
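Validating the configuration first keeps a malformed blockip.conf entry from taking nginx down on reload:

nginx -t && service nginx reload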


- Unblocking IPs

#!/bin/bash
sed -i 's/^/#&/g' /usr/local/data/nginx/conf/blockip.conf  # comment out every line in the nginx block-list file
service nginx reload   # reload nginx; all the IPs are now unblocked
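To lift the ban for a single address instead of the whole list, deleting just its line also works (1.2.3.4 is a placeholder):

sed -i '/^deny 1\.2\.3\.4;$/d' /usr/local/data/nginx/conf/blockip.conf
nginx -t && service nginx reload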
Blocking with iptables

...

- Blocking script


#!/bin/bash
max=500    # threshold: block any IP whose request count exceeds this value
logdir=/usr/local/data/nginx/logs/access_huke88.log  # nginx access log path
confdir=/usr/local/data/nginx/logs/nginx_deny.log # ban record file, checked to avoid re-inserting rules
# create the record file if it does not exist
test -e ${confdir} || touch ${confdir}
port=80
drop_ip=""
# walk the log and pick out every IP with more than ${max} requests
for drop_ip in $(awk '{print $1}' $logdir | sort | uniq -c | sort -rn | awk -v max=$max '{if ($1>max) print $2}')
do
  grep -q "${drop_ip}" ${confdir} && eg=1 || eg=0
  if (( ${eg}==0 ));then
     iptables -I INPUT -p tcp --dport ${port} -s ${drop_ip} -j DROP
     echo ">>>>> `date '+%Y-%m-%d %H%M%S'` - attack source found ->  ${drop_ip} " >> ${confdir}  # record it; doubles as the dedup list
  fi
done
...

Add a cron job so the script runs every five minutes

chmod +x /home/scripts/deny_ip.sh
##### nginx IP banning #####
*/5 * * * * /bin/sh /home/scripts/deny_ip.sh >/dev/null 2>&1

### Add the cron entry
*/5 * * * * /bin/sh /opt/nginx/logs/deny_ip_o.sh >/dev/null 2>&1
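On a large log a run can take longer than the five-minute interval; wrapping the job in flock is a simple guard against overlapping executions (the lock path is arbitrary):

*/5 * * * * /usr/bin/flock -n /tmp/deny_ip.lock /bin/sh /home/scripts/deny_ip.sh >/dev/null 2>&1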



One-liner variants of the same pipeline, printing nginx deny directives directly (to stdout, overwriting blockip.conf, or appending to it):

tail -n 500000 access.log | awk '{print $1}' | sort | uniq -c | sort -rn | awk '{if($1>100)print "deny "$2";"}'
tail -n 500000000 access.log | awk '{print $1}' | sort | uniq -c | sort -rn | awk '{if($1>100)print "deny "$2";"}' > blockip.conf
tail -n 500000 access.log | awk '{print $1}' | sort | uniq -c | sort -rn | awk '{if($1>100)print "deny "$2";"}' >> blockip.conf
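Repeated appends accumulate duplicate deny lines over time; de-duplicating the file in place before reloading keeps it tidy:

sort -u blockip.conf -o blockip.conf
nginx -t && nginx -s reload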




Referer-based blocking of known malicious domains in the server block (the commented-out rule shows an earlier variant that redirected the traffic instead):

		#if ($http_referer ~* "top1top1.top") {
		#      rewrite ^/ http://top.top1top1.top/CallHelper/client/main.jsp;
		#}
		if ($http_referer ~* "top1top1.top") {
			return 404;
		}

		if ($http_referer ~* "shandiandxshop.xyz") {
			rewrite ^/ http://tt.shandiandxshop.xyz/CallHelper/client/index.jsp;
		}
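For broader referer control, nginx's built-in ngx_http_referer_module can replace a pile of if blocks; a sketch for a server block (the regex entries are examples):

		valid_referers none blocked server_names ~\.google\. ~\.baidu\.;
		if ($invalid_referer) {
			return 403;
		}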



https://blog.csdn.net/qq_41018743/article/details/105491785
http://www.siwei.me/blog/posts/linux-nginx-ip-ip
####################################################################
#!/bin/bash
num=100	# max concurrent connections allowed per source IP
# count established connections to port 80, grouped by remote address
list=`netstat -an | grep ^tcp.*:80 | egrep -v 'LISTEN|127.0.0.1' | awk -F"[ ]+|[:]" '{print $6}' | sort | uniq -c | sort -rn | awk -v num=$num '{if ($1>num) print $2}'`
for i in $list
do
	iptables -I INPUT -p tcp --dport 80 -s $i -j DROP
	echo ">>>>> `date '+%Y-%m-%d %H%M%S'` - attack source found ->  ${i} " >> /opt/nginx/logs/nginx_deny80.log  # record it
done

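On newer systems netstat is often missing; roughly the same count can be taken with ss from iproute2 (IPv4 assumed, since splitting on ':' would mangle IPv6 peers):

ss -Htn state established '( sport = :80 )' | awk '{split($4,a,":"); print a[1]}' | sort | uniq -c | sort -rn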

#######################################################################
#!/bin/bash
max=500
# threshold: when an IP's request count exceeds this value, it gets blocked
logdir=/opt/nginx/logs/access.log
# nginx access log path
confdir=/opt/nginx/logs/nginx_deny.log
# ban record file; created if it does not exist
test -e ${confdir} || touch ${confdir}
port=80
drop_ip=""
# walk the log and pick out every IP with more than ${max} requests
for drop_ip in $(tail -n5000000 $logdir | awk '{print $1}' | sort | uniq -c | sort -rn | awk -v max=$max '{if ($1>max) print $2}')
do
  grep -q "${drop_ip}" ${confdir} && eg=1 || eg=0
  if (( ${eg}==0 ));then
     iptables -I INPUT -p tcp --dport ${port} -s ${drop_ip} -j DROP
     echo ">>>>> `date '+%Y-%m-%d %H%M%S'` - attack source found ->  ${drop_ip} " >> ${confdir}  # record it
  fi
done
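Once the ban list grows into the hundreds, one iptables rule per IP becomes unwieldy; an ipset-based variant (the set name is arbitrary) keeps a single rule and pushes addresses into a hash set:

# one-time setup
ipset create nginx_blacklist hash:ip
iptables -I INPUT -p tcp --dport 80 -m set --match-set nginx_blacklist src -j DROP
# inside the loop, banning then becomes a single set insertion:
ipset add nginx_blacklist ${drop_ip}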


####################################################################
#!/usr/bin/env bash

echo ""
echo " ========================================================= "
echo " \                 Nginx日志安全分析脚本 V1.0            / "
echo " ========================================================= "
echo " # 支持Nginx日志分析,攻击告警分析等                    "
echo " # author:al0ne                    "
echo " # https://github.com/al0ne                    "
echo -e "\n"

#Adapted from nmgxy/klionsec with extra signatures added. Intended only for quick triage; ELK or Splunk is still recommended for real analysis

#Features
###Top 20 source IP addresses
###SQL injection analysis
###SQL injection FROM-query statistics
###Scanners / common hacking tools
###Exploit attempt detection
###Sensitive path access
###File inclusion attacks
###HTTP tunnels
###Webshells
###Top 20 URLs by response length
###Rarely accessed script files
###Script files returning 302 redirects

#If there are multiple access files or access.x.gz archives, first merge them with: zcat access*.gz >> access.log
#Directory for analysis results; no trailing /
outfile=/tmp/logs
#Clear the directory if it already exists, otherwise create it
if [ -d $outfile ]; then
    rm -rf $outfile/*
else
    mkdir -p $outfile
fi
#nginx log directory; the trailing / is required
access_dir=/var/log/nginx/
#Log file name; 'access' matches access* files
access_log=access
#Check that the log files exist
num=$(ls ${access_dir}${access_log}* 2>/dev/null | wc -l)
if [ $num -eq 0 ]; then
    echo 'log file not found'
    exit 1
fi
echo -e "\n"

# Detect whether the OS is Debian-family or CentOS-family
OS='None'
if [ -e "/etc/os-release" ]; then
    source /etc/os-release
    case ${ID} in
    "debian" | "ubuntu" | "devuan")
        OS='Debian'
        ;;
    "centos" | "rhel fedora" | "rhel")
        OS='Centos'
        ;;
    *) ;;
    esac
fi

if [ $OS = 'None' ]; then
    if command -v apt-get >/dev/null 2>&1; then
        OS='Debian'
    elif command -v yum >/dev/null 2>&1; then
        OS='Centos'
    else
        echo -e "\n不支持这个系统\n"
        echo -e "已退出"
        exit 1
    fi
fi

# Check whether ag (the_silver_searcher) is installed
if ag -V >/dev/null 2>&1; then
    echo -e "\e[00;32msilversearcher-ag is installed \e[00m"
else
    if [ $OS = 'Centos' ]; then
        yum -y install the_silver_searcher >/dev/null 2>&1
    else
        apt-get -y install silversearcher-ag >/dev/null 2>&1
    fi

fi
#For other log formats adjust the awk field offsets manually: $7 is the URL, $9 the status code, $10 the response length; this script assumes the default nginx format

echo "分析结果日志:${outfile}"
echo "Nginx日志目录:${access_dir}"
echo "Nginx文件名:${access_log}"
echo -e "\n"

echo -e "\e[00;31m[+]TOP 20 IP 地址\e[00m"
ag -a -o --nofilename '\d+\.\d+\.\d+\.\d+' ${access_dir}${access_log}* | sort | uniq -c | sort -nr | head -n 20 | tee -a ${outfile}/top20.log
echo -e "\n"

echo -e "\e[00;31m[+]SQL注入攻击分析\e[00m"
#在SQL注入中排除掉了一些扫描css/js/png图片类等无用告警,并且重点筛选状态码200或者500的告警
ag -a "xp_cmdshell|%20xor|%20and|%20AND|%20or|%20OR|select%20|%20and%201=1|%20and%201=2|%20from|%27exec|information_schema.tables|load_file|benchmark|substring|table_name|table_schema|%20where%20|%20union%20|%20UNION%20|concat\(|concat_ws\(|%20group%20|0x5f|0x7e|0x7c|0x27|%20limit|\bcurrent_user\b|%20LIMIT|version%28|version\(|database%28|database\(|user%28|user\(|%20extractvalue|%updatexml|rand\(0\)\*2|%20group%20by%20x|%20NULL%2C|sqlmap" ${access_dir}${access_log}* | ag -v '/\w+\.(?:js|css|html|jpg|jpeg|png|htm|swf)(?:\?| )' | awk '($9==200)||($9==500) {print $0}' >${outfile}/sql.log
awk '{print "SQL注入攻击" NR"次"}' ${outfile}/sql.log | tail -n1
echo "SQL注入 TOP 20 IP地址"
ag -o '(?<=:)\d+\.\d+\.\d+\.\d+' ${outfile}/sql.log | sort | uniq -c | sort -nr | head -n 20 | tee -a ${outfile}/sql_top20.log
# Focus on FROM queries to spot data exfiltration, excluding scanner noise
echo "SQL injection FROM queries"
cat ${outfile}/sql.log | ag '\bfrom\b' | ag -v 'information_schema' >${outfile}/sql_from_query.log
awk '{print "SQL注入FROM查询" NR"次"}' ${outfile}/sql_from_query.log | tail -n1
echo -e "\n"

echo -e "\e[00;31m[+]扫描器scan & 黑客工具\e[00m"
ag -a "acunetix|by_wvs|nikto|netsparker|HP404|nsfocus|WebCruiser|owasp|nmap|nessus|HEAD /|AppScan|burpsuite|w3af|ZAP|openVAS|.+avij|.+angolin|360webscan|webscan|XSS@HERE|XSS%40HERE|NOSEC.JSky|wwwscan|wscan|antSword|WebVulnScan|WebInspect|ltx71|masscan|python-requests|Python-urllib|WinHttpRequest" ${access_dir}${access_log}* | ag -v '/\w+\.(?:js|css|jpg|jpeg|png|swf)(?:\?| )' | awk '($9==200)||($9==500) {print $0}' >${outfile}/scan.log
awk '{print "共检测到扫描攻击" NR"次"}' ${outfile}/scan.log | tail -n1
echo "扫描工具流量 TOP 20"
ag -o '(?<=:)\d+\.\d+\.\d+\.\d+' ${outfile}/scan.log | sort | uniq -c | sort -nr | head -n 20 | tee -a ${outfile}/scan_top20.log
echo -e "\n"

echo -e "\e[00;31m[+]敏感路径访问\e[00m"
ag -a "/_cat/|/_config/|include=|phpinfo|info\.php|/web-console|JMXInvokerServlet|/manager/html|axis2-admin|axis2-web|phpMyAdmin|phpmyadmin|/admin-console|/jmx-console|/console/|\.tar.gz|\.tar|\.tar.xz|\.xz|\.zip|\.rar|\.mdb|\.inc|\.sql|/\.config\b|\.bak|/.svn/|/\.git/|\.hg|\.DS_Store|\.htaccess|nginx\.conf|\.bash_history|/CVS/|\.bak|wwwroot|备份|/Web.config|/web.config|/1.txt|/test.txt" ${access_dir}${access_log}* | awk '($9==200)||($9==500) {print $0}' >${outfile}/dir.log
awk '{print "共检测到针对敏感文件扫描" NR"次"}' ${outfile}/dir.log | tail -n1
echo "敏感文件访问流量 TOP 20"
ag -o '(?<=:)\d+\.\d+\.\d+\.\d+' ${outfile}/dir.log | sort | uniq -c | sort -nr | head -n 20 | tee -a ${outfile}/dir_top20.log
echo -e "\n"

echo -e "\e[00;31m[+]漏洞利用检测\e[00m"
ag -a "%00|/win.ini|/my.ini|\.\./\.\./|/etc/shadow|%0D%0A|file:/|gopher:/|dict:/|WindowsPowerShell|/wls-wsat/|call_user_func_array|uddiexplorer|@DEFAULT_MEMBER_ACCESS|@java\.lang\.Runtime|OgnlContext|/bin/bash|cmd\.exe|wget\s|curl\s|s=/index/\think" ${access_dir}${access_log}* | awk '($9==200)||($9==500) {print $0}' >${outfile}/exploit.log
awk '{print "漏洞利用探测" NR"次"}' ${outfile}/exploit.log | tail -n1
echo "漏洞利用检测 TOP 20"
ag -o '(?<=:)\d+\.\d+\.\d+\.\d+' ${outfile}/exploit.log | sort | uniq -c | sort -nr | head -n 20 | tee -a ${outfile}/exploit_top20.log
echo -e "\n"

echo -e "\e[00;31m[+]webshell\e[00m"
ag -a "=whoami|dbname=|exec=|cmd=|\br57\b|\bc99\b|\bc100\b|\bb374k\b|adminer.php|eval\(|assert\(|%eval|%execute|tunnel\.[asp|php|jsp|aspx]{3,4}|makewebtaski|ma\.[asp|php|jsp|aspx]{3,4}|\bup\.[asp|php|jsp|aspx]{3,4}|cmd\.[asp|php|jsp|aspx]{3,4}|201\d\.[asp|php|jsp|aspx]{3,4}|xiaoma\.[asp|php|jsp|aspx]{3,4}|shell\.[asp|php|jsp|aspx]{3,4}|404\.[asp|php|jsp|aspx]{3,4}|tom\.[asp|php|jsp|aspx]{3,4}|k8cmd\.[asp|php|jsp|aspx]{3,4}|ver[0-9]{3,4}\.[asp|php|jsp|aspx]{3,4}|\.aar|[asp|php|jsp|aspx]{3,4}spy\.|o=vLogin|aioshell|admine|ghost\.[asp|php|jsp|aspx]{3,4}|r00ts|90sec|t00ls|editor\.aspx|wso\.[asp|aspx]{3,4}" ${access_dir}${access_log}* | awk '($9==200)||($9==500) {print $0}' >${outfile}/webshell.log
awk '{print "共检测到webshell行为" NR "次"}' ${outfile}/webshell.log | tail -n1
echo "Webshell TOP 20"
ag -o '(?<=:)\d+\.\d+\.\d+\.\d+' ${outfile}/webshell.log | sort | uniq -c | sort -nr | head -n 20 | tee -a ${outfile}/webshell_top20.log
echo -e "\n"

echo -e "\e[00;31m[+]HTTP Tunnel\e[00m"
#reGeorg proxy signatures
ag -a "cmd=disconnect|cmd=read|cmd=forward|cmd=connect|127.0.0.1" ${access_dir}${access_log}* | awk '($9==200)||($9==500) {print $0}' | tee -a ${outfile}/tunnel.log
awk '{print "共检测到隧道行为" NR "次"}' ${outfile}/tunnel.log | tail -n1
echo -e "\n"

echo -e "\e[00;31m[+]Top 20 url响应长度\e[00m"
# 查找url响应长度最长的url排序,目的是有没有下载服务器的一些打包文件
len=$(cat ${access_dir}${access_log}* | awk '{print $10}' | sort -nr | head -n 20)
echo $len | awk 'BEGIN{ RS=" " }{ print $0 }' | xargs -i{} ag -a --nocolor '\d+\s{}\s' ${access_dir}${access_log}* | awk '{print $7,$10}' | sort | uniq | sort -k 2 -nr | tee -a ${outfile}/url_rsp_len.log
echo -e "\n"

echo -e "\e[00;31m[+]罕见的脚本文件访问\e[00m"
echo "访问量特别特别少的脚本文件极有可能是webshell"
cat ${access_dir}${access_log}* | awk '($9==200)||($9==500) {print $7}' | sort | uniq -c | sort -n | ag -v '\?' | ag '\.php|\.jsp|\.asp|\.aspx' | head -n 20 | tee -a ${outfile}/rare_url.log
echo -e "\n"

echo -e "\e[00;31m[+]302跳转\e[00m"
echo "此目的是寻找一些登录成功的脚本文件"
cat ${access_dir}${access_log}* | awk '($9==302)||($9==301) {print $7}' | sort | uniq -c | sort -n | ag -v '\?' | ag '\.php|\.jsp|\.asp|\.aspx' | head -n 20 | tee -a ${outfile}/302_goto.log
echo -e "\n"



######################################################################
#!/bin/bash

echo "##########################################################################"
echo "#   nginx日志分析小工具,author:william,https://github.com/xiucaiwu       #"
echo "#   本工具暂时不支持跨天日志分析,Nginx日志格式为默认格式                 #"
echo "#   请输入要分析的时段(为空则分析全部日志):                              #"
echo "#   分析今天3点10分到5点的数据:03:10-05:00 "-"前后没有空格               #"
echo "#   分析2018年8月20号3点到5点的数据:2018-08-20 03:00-05:00               #"
echo "##########################################################################"

# Default directory for the sliced nginx logs
default_parse_ngx_dir_path='/opt/log/nginx'
# Path of the generated log slice
parse_ngx_path=""
# Default nginx log path
#default_ngx_path="/usr/local/nginx/logs/host.access.145.`date "+%Y%m%d"`.log"
default_ngx_path="/root/wwwlog/access.log"
# Length of the user-supplied nginx log path
ngx_path_len=0
# Length of the user-supplied slice-directory path
ngx_dir_path_len=0
# an empty array
array=()
# split one string by another
function str_split(){
	# the delimiter
	delimiter=$1
	# the target string
	string=$2
	# note the space at the end
	array=(${string//$delimiter/ })
	# return can only pass back an int
#	return $arr
}

read -p "请输入nginx日志文件路径:" ngx_path
ngx_path_len=`echo $ngx_path | wc -L`
if [ `echo $ngx_path | wc -L` == 0 ];then
	ngx_path=$default_ngx_path
fi
if [ ! -f $ngx_path ];then
	echo "日志不存在"
#	exit
fi

read -p "请输入存放分析后的nginx日志文件夹路径,默认为/opt/log/nginx:" ngx_parse_dir_path
if [ `echo $ngx_dir_path | wc -L` == 0 ];then
	ngx_parse_dir_path=$default_parse_ngx_dir_path
fi
if [ ! -d $ngx_parse_dir_path ];then
	echo $ngx_parse_dir_path "不存在"
#	exit
fi

read -p "请输入要分析的时段(24小时制):" ngx_time
# 统计输入的字符串长度
len=`echo $ngx_time | wc -L`
if [ $len == 0 ];then
	# 当前是几时几分
	hour_minute=`date +%H:%I`
	filename=`date +%Y%m%d`".log"
	mydate=`date +%d/%b/%Y`
	parse_ngx_path=$ngx_parse_dir_path/$filename
	
	echo -e "\033[32m 文件${parse_ngx_path}正在生成... \033[0m"
	awk -v mydate=$mydate -v arr=$hour_minute -F "[ /:]" '$1"/"$2"/"$3==mydate && $4":"$5>="00:00" && $4":"$5<=arr' $ngx_path > $parse_ngx_path
	echo -e "\033[32m 文件${parse_ngx_path}生成成功!!! \033[0m"
elif [ $len == 11 ];then
	# 统计"-"出现的次数
	if [ `echo $ngx_time | grep -o '-' | wc -l` == 1 ];then
		# 当前日期
		current_date=`date "+%Y-%m-%d %H:%M"`
		# 当前日期对应的时间戳
		current_timestamp=`date -d "$current_date" +%s`
		str_split "-" $ngx_time
		# 用户输入的日期
		user_date="`date \"+%Y-%m-%d\"` ${array[0]}"
		# 用户输入的日期对应的时间戳
		user_timestamp=`date -d "$user_date" +%s`
		filename=`date +%Y%m%d`"[${array[0]}-${array[1]}].log"
		mydate=`date +%d/%b/%Y`
		parse_ngx_path=$ngx_parse_dir_path/$filename
		
		if [ $user_timestamp == $current_timestamp ];then
			echo -e "\033[32m 文件${parse_ngx_path}正在生成... \033[0m"
			awk -v mydate=$mydate -v arr1=${array[0]} -v arr2=${array[1]} -F "[ /:]" '$1"/"$2"/"$3==mydate && $4":"$5>=arr1 && $4":"$5<=arr2' $ngx_path > $parse_ngx_path
			echo -e "\033[32m 文件${parse_ngx_path}生成成功!!! \033[0m"
		elif [ ! -f $parse_ngx_path ];then
			echo -e "\033[32m 文件${parse_ngx_path}正在生成... \033[0m"
			awk -v mydate=$mydate -v arr1=${array[0]} -v arr2=${array[1]} -F "[ /:]" '$1"/"$2"/"$3==mydate && $4":"$5>=arr1 && $4":"$5<=arr2' $ngx_path > $parse_ngx_path
			echo -e "\033[32m 文件${parse_ngx_path}生成成功!!! \033[0m"
		fi
		
	else
		echo "格式输入不正确"
		exit
	fi
elif [ $len == 22 ];then	
	# 统计"-"出现的次数
	if [ `echo $ngx_time | grep -o '-' | wc -l` == 3 ];then
		str_split " " "$ngx_time"
		# 自定义日期格式
		mydate1=`date -d "${array[0]}" +%d/%b/%Y`
		# 日期转时间戳
		timestamp=`date -d "${array[0]}" +%s`
		# 时间戳转日期
		mydate2=`date -d @$timestamp "+%Y%m%d"`
		str_split "-" ${array[1]}
		filename=$mydate2"[${array[0]}-${array[1]}].log"
		parse_ngx_path=$ngx_parse_dir_path/$filename
		
		if [ ! -f $parse_ngx_path ];then
			echo -e "\033[32m 文件${parse_ngx_path}正在生成... \033[0m"
			awk -v mydate=$mydate1 -v arr1=${array[0]} -v arr2=${array[1]} -F "[ /:]" '$1"/"$2"/"$3==mydate && $4":"$5>=arr1 && $4":"$5<=arr2' $ngx_path > $parse_ngx_path
			echo -e "\033[32m 文件${parse_ngx_path}生成成功!!! \033[0m"
		fi
		
	else
		echo "格式输入不正确"
		exit
	fi
else
	echo "格式输入不正确"
	exit
fi
# Analyse the generated log slice
if [ ! -f $parse_ngx_path ];then
	echo -e "\033[31m ${parse_ngx_path} does not exist \033[0m"
fi
# Top source IPs
echo -e "\033[31m Top 10 IPs: \033[0m"
awk '{print $1}' $parse_ngx_path | sort | uniq -c | sort -n -k 1 -r | head -n 10
ip_array=`awk '{print $1}' $parse_ngx_path | sort | uniq -c | sort -n -k 1 -r | head -n 10 | awk '{print $2}'`
# Top URLs
echo -e "\033[31m Top 10 URLs: \033[0m"
awk '{print $7}' $parse_ngx_path | sort |uniq -c | sort -rn | head -n 10
# Top URLs per IP
for i in ${ip_array[@]};do 
	echo -e "\033[31m Top 10 URLs for IP ${i}: \033[0m"
	grep "$i" $parse_ngx_path | awk '{print $7}' | sort | uniq -c | sort -rn | head -10
done
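The core of this tool is the awk time-window filter, which can also be used standalone, assuming a log whose timestamp splits into date fields $1-$3 and time fields $4-$5 under -F'[ /:]' as the script expects (the date and window below are examples):

awk -F'[ /:]' '$1"/"$2"/"$3=="20/Aug/2018" && $4":"$5>="03:00" && $4":"$5<="05:00"' access.log > slice.log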
