Count connections by TCP state
[root@staff ~]# netstat -nt | awk '{++a[$NF]} END {for (b in a) print b,"\t",a[b]}'
TIME_WAIT 30
CLOSE_WAIT 2
FIN_WAIT1 1
State 1
FIN_WAIT2 1
ESTABLISHED 110
SYN_RECV 1
servers) 1
LAST_ACK 1
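The "State" and "servers)" entries above come from netstat's two header lines, which get counted along with the real connections. A minimal refinement (a sketch, not part of the original post) only counts lines that actually start with a protocol name:
netstat -nt | awk '/^tcp/ {++a[$NF]} END {for (s in a) print s, "\t", a[s]}'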
Find the foreign IPs with the most connections
[root@staff ~]# netstat -anlput | awk '{print $5}' | sort | uniq -c | sort -nr | head -n10
37 0.0.0.0:*
25 10.19.64.9:389
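Field 5 is the foreign address in host:port form, so the counts above are per IP-and-port. If the goal is the busiest client IPs regardless of port, a hedged variant (assuming IPv4 addresses, so the port after the colon can simply be cut off) is:
netstat -ant | awk '/^tcp/ {print $5}' | cut -d: -f1 | sort | uniq -c | sort -nr | head -n10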
Top requested hostnames in the access log, sorted by hit count
awk '{print $2}' access_log.2015041609 | sort | uniq -c| sort -nr
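Assuming the custom log format shown in the sample line further down (field 2 is the Host header and field 12 the response size in bytes), hits and traffic per hostname can be tallied in a single awk pass; this is only a sketch against that assumed layout:
awk '{hits[$2]++; bytes[$2] += $12} END {for (h in hits) print hits[h], bytes[h], h}' access_log.2015041609 | sort -nr | head -n10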
Top client IPs, sorted by hit count
awk '{print $3}' access_log.2015041609 | sort | uniq -c | sort -nr | head -10
awk '{print $3, $4}' access_log.2015042314 | sort | uniq -c | sort -nr | head -n10
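A related one-liner, again assuming the field layout of the sample line below (field 11 is the status code, field 3 the client IP), lists the IPs that triggered the most 403 responses, which is handy for checking which clients the blocking rules are hitting:
awk '$11 == 403 {print $3}' access_log.2015042314 | sort | uniq -c | sort -nr | head -n10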
List access-log entries whose size field exceeds the threshold (50000 in this example)
awk '$11 > 50000' access.log > /tmp/dayu500.html
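Note that in the sample log line further down, field 11 is the status code and field 12 the byte count, so if the intent is to filter on response size with that format, the check would target $12 instead (a hedged variant, not the original command):
awk '$12 > 50000' access.log > /tmp/dayu500.html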
#### Apache: use mod_rewrite to block certain URLs ####
RewriteEngine on
RewriteCond %{ENV:deny_env} ^1$
# return 403 Forbidden for every URL when deny_env is set
RewriteRule /* - [F]
# unconditionally block this specific URL
RewriteRule /pic-1790023-1.html - [F]
#RewriteCond %{HTTP_HOST} ^win8\.pchome\.net$
#RewriteRule ^/$ /Index.php?c=Info&artId=1558647 [NC,L]
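Once Apache has been reloaded, the rule can be spot-checked with curl; a blocked URL should come back as 403 Forbidden (the hostname is taken from the commented example above and may differ from the real vhost):
curl -I http://win8.pchome.net/pic-1790023-1.html        # expect: HTTP/1.1 403 Forbidden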
# In this Apache log line, the leading "0" is the time taken to serve the request in seconds, 403 is the HTTP status code, and 224 is the response size in bytes
0 m.pchome.net 42.156.138.58 - - [02/Sep/2015:11:51:23 +0800] "GET /article/937559_p3.html HTTP/1.1" 403 224 "-" "Mozilla/5.0 (Linux; U; Android 4.3; zh-CN; SCH-N719 Build/JSS15J) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 YisouSpider/1.0.0 Mobile Safari/533.1" -
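For reference, a LogFormat roughly like the following would produce such a line; this is a reconstruction from the visible fields (service time %T, Host header, client, identd, user, timestamp, request, status, bytes, Referer, User-Agent), not the site's actual configuration, the nickname is hypothetical, and the meaning of the trailing "-" is unknown:
LogFormat "%T %{Host}i %h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined_with_time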
### Apache: stop spiders/crawlers from fetching pages
## Add the following blocks to httpd.conf; any location works, but the end of the file is recommended ##
SetEnvIfNoCase Remote_Addr "^(123\.151\.43\.94|211\.151\.247\.2|116\.237\.199\.217|219\.233\.222\.78|210\.242\.215\.213|113\.31\.19\.198|221\.180\.151\.203)$" deny_env=1
SetEnvIfNoCase User-Agent "http_load|[nN]utch|Synthesio Crawler|YisouSpider|jike.com|AppEngine-Google|appengine|Sogou|bingbot" deny_env=1
<FilesMatch ".">
    Order Allow,Deny
    Allow from all
    Deny from env=deny_env
</FilesMatch>
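To verify the block, check and reload the config, then send a request with one of the banned User-Agents; it should be answered with 403, while a normal browser UA should normally still get through (the host is taken from the sample log line and is only a placeholder):
apachectl configtest && apachectl graceful
curl -I -A "YisouSpider/1.0.0" http://m.pchome.net/        # expect: 403 Forbidden
curl -I -A "Mozilla/5.0" http://m.pchome.net/              # expect: 200 OK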