Initial commit

Gregory Soutade 2014-11-19 08:01:12 +01:00
parent 26688e4bf7
commit b1549ca884
5 changed files with 123 additions and 28 deletions

hooks_pre/H001_robot.py (symbolic link, 1 addition)

@@ -0,0 +1 @@
+../plugins/hooks_pre/H001_robot.py

hooks_pre/H002_soutade.py (symbolic link, 1 addition)

@@ -0,0 +1 @@
+../plugins/hooks_pre/H002_soutade.py
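The two symbolic links above are how hooks are activated: iwla only scans hooks_pre/ (and hooks_post/), so a plugin from the plugins/ tree takes effect once it is linked in. A minimal sketch of that convention; the enable_hook helper is hypothetical, not part of this commit:

    import os

    def enable_hook(name, hooks_dir='hooks_pre', plugins_dir='../plugins/hooks_pre'):
        # Hypothetical helper: activate a plugin by linking it into the scanned directory.
        link = os.path.join(hooks_dir, name)
        if not os.path.lexists(link):
            os.symlink(os.path.join(plugins_dir, name), link)

    enable_hook('H001_robot.py')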

iwla.py (91 changes: 63 additions, 28 deletions)

@@ -9,6 +9,7 @@ from robots import awstats_robots;
 
 print '==> Start'
 
+meta_visit = {}
 current_visit = {}
 
 log_format = '$server_name:$server_port $remote_addr - $remote_user [$time_local] ' +\
@@ -23,16 +24,53 @@ time_format = '%d/%b/%Y:%H:%M:%S +0100'
 #print "Log format : " + log_format_extracted
 
 log_re = re.compile(log_format_extracted)
-uri_re = re.compile(r'(?P<extract_uri>[^\?]*)\?(?P<extract_parameters>.*)')
+uri_re = re.compile(r'(?P<extract_uri>[^\?]*)[\?(?P<extract_parameters>.*)]?')
 pages_extensions = ['/', 'html', 'xhtml', 'py', 'pl', 'rb', 'php']
 viewed_http_codes = [200]
 
 cur_time = None
 
+PRE_HOOK_DIRECTORY = './hooks_pre/*.py'
+POST_HOOK_DIRECTORY = './hooks_post/*.py'
+
 print '==> Generating robot dictionary'
 
 awstats_robots = map(lambda (x) : re.compile(x, re.IGNORECASE), awstats_robots)
 
+def generate_day_stats():
+    days_stats = {}
+    days_stats['viewed_bandwidth'] = 0
+    days_stats['not_viewed_bandwidth'] = 0
+    days_stats['viewed_pages'] = 0
+    days_stats['viewed_hits'] = 0
+    days_stats['pages'] = set()
+
+    for k in current_visit.keys():
+        super_hit = current_visit[k]
+        if super_hit['robot']:
+            days_stats['not_viewed_bandwidth'] += super_hit['bandwith']
+            continue
+        days_stats['viewed_bandwidth'] += super_hit['bandwith']
+        days_stats['viewed_pages'] += super_hit['viewed_pages']
+        days_stats['viewed_hits'] += super_hit['viewed_hits']
+        for p in super_hit['pages']:
+            if not p['is_page']: continue
+            req = p['extract_request']
+            days_stats['pages'].add(req['extract_uri'])
+
+    return days_stats
+
+def call_plugins(path, *kwargs):
+    print '==> Call plugins (%s)' % path
+    plugins = glob.glob(path)
+    plugins.sort()
+    for p in plugins:
+        print '\t%s' % (p)
+        mod = imp.load_source('hook', p)
+        mod.hook(*kwargs)
+
 def isPage(request):
     for e in pages_extensions:
         if request.endswith(e):
@@ -70,7 +108,7 @@ def appendHit(hit):
     else:
         super_hit[hit_key] += 1
 
-def createGeneric(hit):
+def createUser(hit):
     super_hit = current_visit[hit['remote_addr']] = {}
     super_hit['viewed_pages'] = 0;
     super_hit['viewed_hits'] = 0;
@@ -78,12 +116,7 @@ def createGeneric(hit):
     super_hit['not_viewed_hits'] = 0;
     super_hit['bandwith'] = 0;
     super_hit['pages'] = [];
-    return super_hit
-
-def createUser(hit, robot):
-    super_hit = createGeneric(hit)
-    super_hit['robot'] = robot;
+    super_hit['robot'] = isRobot(hit);
 
     appendHit(hit)
 
 def isRobot(hit):
@@ -101,16 +134,17 @@ def decode_http_request(hit):
         hit['extract_request'] = groups.groupdict()
         uri_groups = uri_re.match(hit['extract_request']['http_uri']);
         if uri_groups:
-            hit['extract_request']['extract_uri'] = uri_groups.group('extract_uri')
-            hit['extract_request']['extract_parameters'] = uri_groups.group('extract_parameters')
+            d = uri_groups.groupdict()
+            hit['extract_request']['extract_uri'] = d['extract_uri']
+            if 'extract_parameters' in d.keys():
+                hit['extract_request']['extract_parameters'] = d['extract_parameters']
     else:
         print "Bad request extraction " + hit['request']
         return False
 
     referer_groups = uri_re.match(hit['http_referer']);
     if referer_groups:
-        hit['extract_referer']['extract_uri'] = referer_groups.group('extract_uri')
-        hit['extract_referer']['extract_parameters'] = referer_groups.group('extract_parameters')
+        referer = hit['extract_referer'] = referer_groups.groupdict()
     return True
 
 def decode_time(hit):
@@ -131,7 +165,7 @@ def newHit(hit):
 
     t = hit['time_decoded']
 
-    current_visit['last_time'] = t
+    meta_visit['last_time'] = t
 
     if cur_time == None:
         cur_time = t
@@ -143,7 +177,7 @@ def newHit(hit):
 
     if remote_addr in current_visit.keys():
         appendHit(hit)
     else:
-        createUser(hit, isRobot(hit))
+        createUser(hit)
 
     return True
@@ -161,18 +195,19 @@ for l in f:
         print "No match " + l
 f.close();
 
-print '==> Call plugins'
-plugins = glob.glob('./hooks_pre/*.py')
-plugins.sort()
-for p in plugins:
-    print '\t%s' % (p)
-    mod = imp.load_source('hook', p)
-    mod.hook(current_visit)
+call_plugins(PRE_HOOK_DIRECTORY, current_visit)
 
-for ip in current_visit.keys():
-    hit = current_visit[ip]
-    if hit['robot']: continue
-    print "%s =>" % (ip)
-    for k in hit.keys():
-        if k != 'pages':
-            print "\t%s : %s" % (k, current_visit[ip][k])
+stats = generate_day_stats()
+print stats
+
+valid_visitors = {k: v for (k,v) in current_visit.items() if not current_visit[k]['robot']}
+#print valid_visitors
+# for ip in current_visit.keys():
+#     hit = current_visit[ip]
+#     if hit['robot']: continue
+#     print "%s =>" % (ip)
+#     for k in hit.keys():
+#         if k != 'pages':
+#             print "\t%s : %s" % (k, current_visit[ip][k])
+
+call_plugins(POST_HOOK_DIRECTORY, valid_visitors)
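The hook contract introduced by call_plugins() is minimal: every file matching the glob must expose a module-level hook() that receives the visit dictionary. A quick way to exercise a single hook in isolation, assuming the same Python 2 environment as the code above (the fake visit data is illustrative):

    import imp

    # Load one plugin exactly as call_plugins() does.
    mod = imp.load_source('hook', './hooks_pre/H001_robot.py')

    fake_visit = {'1.2.3.4': {'robot': False, 'viewed_pages': 0, 'viewed_hits': 0,
                              'not_viewed_pages': 0, 'not_viewed_hits': 0,
                              'bandwith': 0, 'pages': []}}
    mod.hook(fake_visit)
    print fake_visit['1.2.3.4']['robot']  # 1: no page viewed, so rule 1 below fires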

plugins/hooks_pre/H001_robot.py (new file, 39 additions)

@@ -0,0 +1,39 @@
+# Basic rule to detect robots
+
+def hook(hits):
+    for k in hits.keys():
+        super_hit = hits[k]
+
+        if super_hit['robot']: continue
+
+        isRobot = False
+        referers = 0
+
+        # 1) no pages view --> robot
+        if not super_hit['viewed_pages']:
+            super_hit['robot'] = 1
+            continue
+
+        # 2) pages without hit --> robot
+        if not super_hit['viewed_hits']:
+            super_hit['robot'] = 1
+            continue
+
+        for hit in super_hit['pages']:
+            # 3) /robots.txt read
+            if hit['extract_request']['http_uri'] == '/robots.txt':
+                isRobot = True
+                break
+
+            # 4) Any referer for hits
+            if not hit['is_page'] and hit['http_referer']:
+                referers += 1
+
+        if isRobot:
+            super_hit['robot'] = 1
+            continue
+
+        if super_hit['viewed_hits'] and not referers:
+            super_hit['robot'] = 1
+            continue
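Taken together, the four rules only leave a visitor unflagged when it viewed at least one page and one hit, never requested /robots.txt, and sent a referer on some non-page hit. A hand-built visit that passes every check, with the hook loaded as in the earlier sketch (field values are illustrative):

    human = {'robot': False, 'viewed_pages': 1, 'viewed_hits': 3,
             'not_viewed_pages': 0, 'not_viewed_hits': 0, 'bandwith': 12345,
             'pages': [{'is_page': True, 'http_referer': '',
                        'extract_request': {'http_uri': '/index.html'}},
                       {'is_page': False, 'http_referer': 'http://soutade.fr/',
                        'extract_request': {'http_uri': '/style.css'}}]}
    mod.hook({'5.6.7.8': human})
    print human['robot']  # still False: rules 1-4 all pass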

plugins/hooks_pre/H002_soutade.py (new file, 19 additions)

@@ -0,0 +1,19 @@
+import re
+
+# Remove logo from indefero
+logo_re = re.compile(r'^.+/logo/$')
+
+# Basic rule to detect robots
+def hook(hits):
+    for k in hits.keys():
+        super_hit = hits[k]
+
+        if super_hit['robot']: continue
+
+        for p in super_hit['pages']:
+            if not p['is_page']: continue
+            if logo_re.match(p['extract_request']['extract_uri']):
+                p['is_page'] = False
+                super_hit['viewed_pages'] -= 1
+                super_hit['viewed_hits'] += 1
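The effect of this hook is to demote indefero's per-project /logo/ URLs from pages to plain hits, so they do not inflate viewed_pages. A quick check of the pattern (the URIs are illustrative):

    import re

    logo_re = re.compile(r'^.+/logo/$')
    print bool(logo_re.match('/p/iwla/logo/'))    # True: reclassified as a hit
    print bool(logo_re.match('/p/iwla/source/'))  # False: stays a page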