iwla

iwla Git Source Tree

Root/plugins/pre_analysis/robots.py

Source at commit 4e02325733e5e8e4f5de2f0046e721f8da7abfff created 6 years 10 months ago.
By Gregory Soutade, Initial commit
1# -*- coding: utf-8 -*-
2#
3# Copyright Grégory Soutadé 2015
4
5# This file is part of iwla
6
7# iwla is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation, either version 3 of the License, or
10# (at your option) any later version.
11#
12# iwla is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License
18# along with iwla. If not, see <http://www.gnu.org/licenses/>.
19#
20
21import re
22
23from iwla import IWLA
24from iplugin import IPlugin
25
26import awstats_data
27
28"""
29Pre analysis hook
30
31Filter robots
32
33Plugin requirements :
34 None
35
Conf values needed :
    None
40Output files :
41 None
42
43Statistics creation :
44 None
45
46Statistics update :
47visits :
48 remote_addr =>
49 robot
50
51Statistics deletion :
52 None
53"""
54
class IWLAPreAnalysisRobots(IPlugin):
    """Pre-analysis plugin that flags visitors as robots.

    A visitor is marked as a robot (super_hit['robot'] = 1) when its
    user agent matches one of the awstats robot signatures, or when
    its browsing pattern looks automated: no hit requests at all,
    /robots.txt fetched, or hits fetched without any referer while no
    page was viewed.
    """

    def __init__(self, iwla):
        super(IWLAPreAnalysisRobots, self).__init__(iwla)
        self.API_VERSION = 1

    def load(self):
        # Pre-compile one case-insensitive regex per awstats robot
        # signature. A list comprehension is used instead of map():
        # it avoids the Python-2-only "lambda (x)" syntax and, unlike
        # Python 3's map(), yields a real list that can be re-iterated
        # for every visitor in hook().
        self.awstats_robots = [
            re.compile('.*%s.*' % robot, re.IGNORECASE)
            for robot in awstats_data.robots
        ]
        return True

    def hook(self):
        """Apply basic heuristic rules to detect robots among current visitors."""
        hits = self.iwla.getCurrentVisists()
        for (k, super_hit) in hits.items():
            # Already flagged by a previous pass: nothing to do.
            if super_hit['robot']: continue

            isRobot = False
            referers = 0

            requests = super_hit['requests']
            # Guard: a visitor with no recorded request at all cannot
            # be classified (and requests[0] below would crash).
            if not requests: continue

            first_page = requests[0]
            if not self.iwla.isValidForCurrentAnalysis(first_page): continue

            # 1) User agent matches a known robot signature --> robot
            for r in self.awstats_robots:
                if r.match(first_page['http_user_agent']):
                    isRobot = True
                    break

            if isRobot:
                super_hit['robot'] = 1
                continue

            # 2) Pages viewed but no hit requests at all --> robot
            if not super_hit['viewed_hits']:
                super_hit['robot'] = 1
                continue

            for hit in requests:
                # 3) /robots.txt read --> robot
                if hit['extract_request']['http_uri'] == '/robots.txt':
                    isRobot = True
                    break

                # 4) Count hit (non-page) requests carrying a referer.
                if not hit['is_page'] and hit['http_referer']:
                    referers += 1

            if isRobot:
                super_hit['robot'] = 1
                continue

            # 5) No page viewed, only hits, and none of them sent a
            #    referer: resources fetched blindly --> robot
            if not super_hit['viewed_pages'] and \
               (super_hit['viewed_hits'] and not referers):
                super_hit['robot'] = 1
                continue

Archive Download this file

Branches

Tags