iwla Git Source Tree

Root/iwla.py

#!/usr/bin/env python

import os
import shutil
import sys
import re
import time
import pickle
import gzip
import importlib
import argparse
from calendar import monthrange
from datetime import date

import default_conf as conf
import conf as _
conf.__dict__.update(_.__dict__)
del _

from iplugin import *
from display import *

#
# Main class IWLA
# Parse logs, compute statistics, call plugins and produce output
# For now, only HTTP logs are supported
#
# Plugin requirements:
#   None
#
# Conf values needed:
#   analyzed_filename
#   domain_name
#
# Output files:
#   DB_ROOT/meta.db
#   DB_ROOT/year/month/iwla.db
#   OUTPUT_ROOT/index.html
#   OUTPUT_ROOT/year/month/index.html
#
# Statistics creation:
#
# meta =>
#   last_time
#   start_analysis_time
#   stats =>
#     year =>
#       month =>
#         viewed_bandwidth
#         not_viewed_bandwidth
#         viewed_pages
#         viewed_hits
#         nb_visitors
#
# month_stats:
#   viewed_bandwidth
#   not_viewed_bandwidth
#   viewed_pages
#   viewed_hits
#   nb_visitors
#
# days_stats:
#   day =>
#     viewed_bandwidth
#     not_viewed_bandwidth
#     viewed_pages
#     viewed_hits
#     nb_visitors
#
# visits:
#   remote_addr =>
#     remote_addr
#     remote_ip
#     viewed_pages
#     viewed_hits
#     not_viewed_pages
#     not_viewed_hits
#     bandwidth
#     last_access
#     requests =>
#       [fields_from_format_log]
#       extract_request =>
#         extract_uri
#         extract_parameters*
#       extract_referer* =>
#         extract_uri
#         extract_parameters*
#     robot
#     hit_only
#     is_page
#
# valid_visitors:
#   month visitors, excluding robots and (if not conf.count_hit_only_visitors)
#   hit-only visitors
#
# Statistics update:
#   None
#
# Statistics deletion:
#   None
#

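# A minimal conf.py overriding default_conf could look like this (illustrative
# values, not defaults shipped with iwla):
#
#     analyzed_filename = '/var/log/apache2/access.log'
#     domain_name = 'example.com'
#
# Any value not overridden there (log_format, time_format, hook lists, paths...)
# keeps its value from default_conf thanks to the conf.__dict__.update() above.
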
class IWLA(object):

    ANALYSIS_CLASS = 'HTTP'
    API_VERSION = 1
    IWLA_VERSION = '0.1'

    def __init__(self):
        print '==> Start'

        self.meta_infos = {}
        self.analyse_started = False
        self.current_analysis = {}
        self.cache_plugins = {}
        self.display = DisplayHTMLBuild(self)
        self.valid_visitors = None

        self.log_format_extracted = re.sub(r'([^\$\w])', r'\\\g<1>', conf.log_format)
        self.log_format_extracted = re.sub(r'\$(\w+)', '(?P<\g<1>>.+)', self.log_format_extracted)
        self.http_request_extracted = re.compile(r'(?P<http_method>\S+) (?P<http_uri>\S+) (?P<http_version>\S+)')
        self.log_re = re.compile(self.log_format_extracted)
        self.uri_re = re.compile(r'(?P<extract_uri>[^\?]+)(\?(?P<extract_parameters>.+))?')
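        # Illustration of the two substitutions above: with an (assumed)
        # conf.log_format of
        #     '$remote_addr [$time_local] "$request" $status $body_bytes_sent "$http_referer"'
        # every non-$ special character is escaped and every $var becomes a
        # named group, giving a pattern roughly like
        #     (?P<remote_addr>.+)\ \[(?P<time_local>.+)\]\ \"(?P<request>.+)\"\ (?P<status>.+)\ (?P<body_bytes_sent>.+)\ \"(?P<http_referer>.+)\"
        # whose groupdict() keys are the [fields_from_format_log] mentioned in
        # the header comment.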
        self.plugins = [(conf.PRE_HOOK_DIRECTORY , conf.pre_analysis_hooks),
                        (conf.POST_HOOK_DIRECTORY , conf.post_analysis_hooks),
                        (conf.DISPLAY_HOOK_DIRECTORY , conf.display_hooks)]

    def getVersion(self):
        return IWLA.IWLA_VERSION

    def getConfValue(self, key, default=None):
        if not key in dir(conf):
            return default
        else:
            return conf.__dict__[key]

    def _clearVisits(self):
        self.current_analysis = {
            'days_stats' : {},
            'month_stats' : {},
            'visits' : {}
        }
        self.valid_visitors = None
        return self.current_analysis

    def getDaysStats(self):
        return self.current_analysis['days_stats']

    def getMonthStats(self):
        return self.current_analysis['month_stats']

    def getCurrentVisists(self):
        return self.current_analysis['visits']

    def getValidVisitors(self):
        return self.valid_visitors

    def getDisplay(self):
        return self.display

    def getCurTime(self):
        return self.meta_infos['last_time']

    def getStartAnalysisTime(self):
        return self.meta_infos['start_analysis_time']

    def isValidForCurrentAnalysis(self, request):
        cur_time = self.meta_infos['start_analysis_time']
        # Analyse not started
        if not cur_time: return False
        return (time.mktime(cur_time) < time.mktime(request['time_decoded']))

    def hasBeenViewed(self, request):
        return int(request['status']) in conf.viewed_http_codes

    def getCurDisplayPath(self, filename):
        cur_time = self.meta_infos['last_time']
        return os.path.join(str(cur_time.tm_year), str(cur_time.tm_mon), filename)

    def getResourcesPath(self):
        return conf.resources_path

    def getCSSPath(self):
        return conf.css_path

    def _clearMeta(self):
        self.meta_infos = {
            'last_time' : None,
            'start_analysis_time' : None
        }
        return self.meta_infos

    def _clearDisplay(self):
        self.display = DisplayHTMLBuild(self)
        return self.display

    def getDBFilename(self, time):
        return os.path.join(conf.DB_ROOT, str(time.tm_year), str(time.tm_mon), conf.DB_FILENAME)

    def _serialize(self, obj, filename):
        base = os.path.dirname(filename)
        if not os.path.exists(base):
            os.makedirs(base)

        # TODO : remove return
        #return

        with open(filename + '.tmp', 'wb+') as f:
            pickle.dump(obj, f)
            f.seek(0)
            with gzip.open(filename, 'w') as fzip:
                fzip.write(f.read())

        os.remove(filename + '.tmp')

    def _deserialize(self, filename):
        if not os.path.exists(filename):
            return None

        with gzip.open(filename, 'r') as f:
            return pickle.load(f)
        return None

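    # A serialized DB can be inspected outside of iwla with the same two
    # modules (sketch; the path below only assumes the layout described in
    # the header comment, DB_ROOT/year/month/iwla.db):
    #
    #     import gzip, pickle
    #     with gzip.open('<DB_ROOT>/2015/1/iwla.db', 'r') as f:
    #         analysis = pickle.load(f)
    #     print analysis['month_stats']
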
    def _callPlugins(self, target_root, *args):
        print '==> Call plugins (%s)' % target_root
        for (root, plugins) in self.plugins:
            if root != target_root: continue
            for p in plugins:
                mod = self.cache_plugins.get(root + '.' + p, None)
                if mod:
                    print '\t%s' % (p)
                    mod.hook(*args)

    def isPage(self, request):
        for e in conf.pages_extensions:
            if request.endswith(e):
                return True

        return False

    def _appendHit(self, hit):
        remote_addr = hit['remote_addr']

        if not remote_addr: return

        if not remote_addr in self.current_analysis['visits'].keys():
            self._createVisitor(hit)

        super_hit = self.current_analysis['visits'][remote_addr]
        super_hit['requests'].append(hit)
        super_hit['bandwidth'] += int(hit['body_bytes_sent'])
        super_hit['last_access'] = self.meta_infos['last_time']

        request = hit['extract_request']

        uri = request.get('extract_uri', request['http_uri'])

        hit['is_page'] = self.isPage(uri)

        status = int(hit['status'])
        if status not in conf.viewed_http_codes:
            return

        if super_hit['robot'] or\
           not status in conf.viewed_http_codes:
            page_key = 'not_viewed_pages'
            hit_key = 'not_viewed_hits'
        else:
            page_key = 'viewed_pages'
            hit_key = 'viewed_hits'

        if hit['is_page']:
            super_hit[page_key] += 1
        else:
            super_hit[hit_key] += 1

    def _createVisitor(self, hit):
        super_hit = self.current_analysis['visits'][hit['remote_addr']] = {}
        super_hit['remote_addr'] = hit['remote_addr']
        super_hit['remote_ip'] = hit['remote_addr']
        super_hit['viewed_pages'] = 0
        super_hit['viewed_hits'] = 0
        super_hit['not_viewed_pages'] = 0
        super_hit['not_viewed_hits'] = 0
        super_hit['bandwidth'] = 0
        super_hit['last_access'] = self.meta_infos['last_time']
        super_hit['requests'] = []
        super_hit['robot'] = False
        super_hit['hit_only'] = 0

    def _decodeHTTPRequest(self, hit):
        if not 'request' in hit.keys(): return False

        groups = self.http_request_extracted.match(hit['request'])

        if groups:
            hit['extract_request'] = groups.groupdict()
            uri_groups = self.uri_re.match(hit['extract_request']['http_uri'])
            if uri_groups:
                d = uri_groups.groupdict()
                hit['extract_request']['extract_uri'] = d['extract_uri']
                if 'extract_parameters' in d.keys():
                    hit['extract_request']['extract_parameters'] = d['extract_parameters']
        else:
            print "Bad request extraction " + hit['request']
            return False

        if hit['http_referer']:
            referer_groups = self.uri_re.match(hit['http_referer'])
            if referer_groups:
                hit['extract_referer'] = referer_groups.groupdict()
        return True

    def _decodeTime(self, hit):
        hit['time_decoded'] = time.strptime(hit['time_local'], conf.time_format)
        return hit['time_decoded']

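    # _decodeTime() relies entirely on conf.time_format. For example, an
    # (assumed) value of
    #
    #     time_format = '%d/%b/%Y:%H:%M:%S +0100'
    #
    # would decode a time_local of '10/Dec/2014:19:35:02 +0100' into a
    # time.struct_time whose tm_year/tm_mon/tm_mday drive the DB and
    # output layout.
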
    def getDisplayIndex(self):
        cur_time = self.meta_infos['last_time']
        filename = self.getCurDisplayPath('index.html')

        return self.display.getPage(filename)

    def _generateDisplayDaysStats(self):
        cur_time = self.meta_infos['last_time']
        title = 'Stats %d/%d' % (cur_time.tm_mon, cur_time.tm_year)
        filename = self.getCurDisplayPath('index.html')
        print '==> Generate display (%s)' % (filename)
        page = self.display.createPage(title, filename, conf.css_path)

        _, nb_month_days = monthrange(cur_time.tm_year, cur_time.tm_mon)
        days = self.display.createBlock(DisplayHTMLBlockTableWithGraph, 'By day', ['Day', 'Visitors', 'Pages', 'Hits', 'Bandwidth', 'Not viewed Bandwidth'], None, nb_month_days, range(1,6))
        days.setColsCSSClass(['', 'iwla_visitor', 'iwla_page', 'iwla_hit', 'iwla_bandwidth', 'iwla_bandwidth'])
        nb_visits = 0
        nb_days = 0
        for i in range(1, nb_month_days+1):
            day = '%d<br/>%s' % (i, time.strftime('%b', cur_time))
            full_day = '%d %s %d' % (i, time.strftime('%b', cur_time), cur_time.tm_year)
            if i in self.current_analysis['days_stats'].keys():
                stats = self.current_analysis['days_stats'][i]
                row = [full_day, stats['nb_visitors'], stats['viewed_pages'], stats['viewed_hits'],
                       stats['viewed_bandwidth'], stats['not_viewed_bandwidth']]
                nb_visits += stats['nb_visitors']
                nb_days += 1
            else:
                row = [full_day, 0, 0, 0, 0, 0]
            days.appendRow(row)
            days.setCellValue(i-1, 4, bytesToStr(row[4]))
            days.setCellValue(i-1, 5, bytesToStr(row[5]))
            days.appendShortTitle(day)
            adate = date(cur_time.tm_year, cur_time.tm_mon, i)
            week_day = adate.weekday()
            if week_day == 5 or week_day == 6:
                days.setRowCSSClass(i-1, 'iwla_weekend')
            if adate == date.today():
                css = days.getCellCSSClass(i, 0)
                if css: css = '%s %s' % (css, 'iwla_curday')
                else: css = 'iwla_curday'
                days.setCellCSSClass(i-1, 0, css)

        stats = self.current_analysis['month_stats']

        row = [0, nb_visits, stats['viewed_pages'], stats['viewed_hits'], stats['viewed_bandwidth'], stats['not_viewed_bandwidth']]
        if nb_days:
            average_row = map(lambda(v): int(v/nb_days), row)
        else:
            average_row = map(lambda(v): 0, row)

        average_row[0] = 'Average'
        average_row[4] = bytesToStr(average_row[4])
        average_row[5] = bytesToStr(average_row[5])
        days.appendRow(average_row)

        row[0] = 'Total'
        row[4] = bytesToStr(row[4])
        row[5] = bytesToStr(row[5])
        days.appendRow(row)
        page.appendBlock(days)
        self.display.addPage(page)

    def _generateDisplayMonthStats(self, page, year, month_stats):
        cur_time = time.localtime()
        months_name = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'June', 'July', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
        title = 'Summary %d' % (year)
        cols = ['Month', 'Visitors', 'Pages', 'Hits', 'Bandwidth', 'Not viewed Bandwidth', 'Details']
        graph_cols=range(1,6)
        months = self.display.createBlock(DisplayHTMLBlockTableWithGraph, title, cols, None, 12, graph_cols)
        months.setColsCSSClass(['', 'iwla_visitor', 'iwla_page', 'iwla_hit', 'iwla_bandwidth', 'iwla_bandwidth', ''])
        total = [0] * len(cols)
        for i in range(1, 13):
            month = '%s<br/>%d' % (months_name[i], year)
            full_month = '%s %d' % (months_name[i], year)
            if i in month_stats.keys():
                stats = month_stats[i]
                link = '<a href="%d/%d/index.html">Details</a>' % (year, i)
                row = [full_month, stats['nb_visitors'], stats['viewed_pages'], stats['viewed_hits'],
                       stats['viewed_bandwidth'], stats['not_viewed_bandwidth'], link]
                for j in graph_cols:
                    total[j] += row[j]
            else:
                row = [full_month, 0, 0, 0, 0, 0, '']
            months.appendRow(row)
            months.setCellValue(i-1, 4, bytesToStr(row[4]))
            months.setCellValue(i-1, 5, bytesToStr(row[5]))
            months.appendShortTitle(month)
            if year == cur_time.tm_year and i == cur_time.tm_mon:
                css = months.getCellCSSClass(i-1, 0)
                if css: css = '%s %s' % (css, 'iwla_curday')
                else: css = 'iwla_curday'
                months.setCellCSSClass(i-1, 0, css)

        total[0] = 'Total'
        total[4] = bytesToStr(total[4])
        total[5] = bytesToStr(total[5])
        months.appendRow(total)
        page.appendBlock(months)

    def _generateDisplayWholeMonthStats(self):
        title = 'Stats for %s' % (conf.domain_name)
        filename = 'index.html'
        print '==> Generate main page (%s)' % (filename)

        page = self.display.createPage(title, filename, conf.css_path)

        last_update = '<b>Last update</b> %s<br />' % (time.strftime('%d %b %Y %H:%M', time.localtime()))
        page.appendBlock(self.display.createBlock(DisplayHTMLRaw, last_update))

        for year in self.meta_infos['stats'].keys():
            self._generateDisplayMonthStats(page, year, self.meta_infos['stats'][year])

        self.display.addPage(page)

    def _generateDisplay(self):
        self._generateDisplayDaysStats()
        self._callPlugins(conf.DISPLAY_HOOK_DIRECTORY)
        self._generateDisplayWholeMonthStats()
        self.display.build(conf.DISPLAY_ROOT)

    def _generateStats(self, visits):
        stats = {}
        stats['viewed_bandwidth'] = 0
        stats['not_viewed_bandwidth'] = 0
        stats['viewed_pages'] = 0
        stats['viewed_hits'] = 0
        #stats['requests'] = set()
        stats['nb_visitors'] = 0

        for (k, super_hit) in visits.items():
            if super_hit['robot']:
                stats['not_viewed_bandwidth'] += super_hit['bandwidth']
                continue

            #print "[%s] =>\t%d/%d" % (k, super_hit['viewed_pages'], super_hit['viewed_hits'])

            if conf.count_hit_only_visitors or\
               super_hit['viewed_pages']:
                stats['nb_visitors'] += 1
            stats['viewed_bandwidth'] += super_hit['bandwidth']
            stats['viewed_pages'] += super_hit['viewed_pages']
            stats['viewed_hits'] += super_hit['viewed_hits']

            # for p in super_hit['requests']:
            #     if not p['is_page']: continue
            #     req = p['extract_request']
            #     stats['requests'].add(req['extract_uri'])

        return stats

    def _generateMonthStats(self):
        self._clearDisplay()

        visits = self.current_analysis['visits']

        stats = self._generateStats(visits)
        duplicated_stats = {k:v for (k,v) in stats.items()}

        cur_time = self.meta_infos['last_time']
        print "== Stats for %d/%d ==" % (cur_time.tm_year, cur_time.tm_mon)
        print stats

        if not 'month_stats' in self.current_analysis.keys():
            self.current_analysis['month_stats'] = stats
        else:
            for (k,v) in stats.items():
                self.current_analysis['month_stats'][k] = v

        self.valid_visitors = {}
        for (k,v) in visits.items():
            if v['robot']: continue
            if conf.count_hit_only_visitors and\
               (not v['viewed_pages']):
                continue
            self.valid_visitors[k] = v

        duplicated_stats['visitors'] = stats['visitors'] = len(self.valid_visitors.keys())

        self._callPlugins(conf.POST_HOOK_DIRECTORY)

        path = self.getDBFilename(cur_time)
        if os.path.exists(path):
            os.remove(path)

        print "==> Serialize to %s" % path

        self._serialize(self.current_analysis, path)

        # Save month stats
        year = cur_time.tm_year
        month = cur_time.tm_mon
        if not 'stats' in self.meta_infos.keys():
            self.meta_infos['stats'] = {}
        if not year in self.meta_infos['stats'].keys():
            self.meta_infos['stats'][year] = {}
        self.meta_infos['stats'][year][month] = duplicated_stats

        self._generateDisplay()

    def _generateDayStats(self):
        visits = self.current_analysis['visits']

        self._callPlugins(conf.PRE_HOOK_DIRECTORY)

        stats = self._generateStats(visits)

        cur_time = self.meta_infos['last_time']
        print "== Stats for %d/%d/%d ==" % (cur_time.tm_year, cur_time.tm_mon, cur_time.tm_mday)

        if cur_time.tm_mday > 1:
            last_day = cur_time.tm_mday - 1
            while last_day:
                if last_day in self.current_analysis['days_stats'].keys():
                    break
                last_day -= 1
            if last_day:
                for k in stats.keys():
                    stats[k] -= self.current_analysis['days_stats'][last_day][k]

        stats['nb_visitors'] = 0
        for (k,v) in visits.items():
            if v['robot']: continue
            if conf.count_hit_only_visitors and\
               (not v['viewed_pages']):
                continue
            if v['last_access'].tm_mday == cur_time.tm_mday:
                stats['nb_visitors'] += 1
        print stats

        self.current_analysis['days_stats'][cur_time.tm_mday] = stats

    def _newHit(self, hit):
        t = self._decodeTime(hit)

        cur_time = self.meta_infos['last_time']

        if cur_time == None:
            self.current_analysis = self._deserialize(self.getDBFilename(t)) or self._clearVisits()
            self.analyse_started = True
        else:
            if not self.analyse_started:
                if time.mktime(t) < time.mktime(cur_time):
                    return False
                else:
                    self.analyse_started = True
            if cur_time.tm_mon != t.tm_mon:
                self._generateMonthStats()
                self.current_analysis = self._deserialize(self.getDBFilename(t)) or self._clearVisits()
            elif cur_time.tm_mday != t.tm_mday:
                self._generateDayStats()

        self.meta_infos['last_time'] = t

        if not self.meta_infos['start_analysis_time']:
            self.meta_infos['start_analysis_time'] = t

        if not self._decodeHTTPRequest(hit): return False

        for k in hit.keys():
            if hit[k] == '-' or hit[k] == '*':
                hit[k] = ''

        self._appendHit(hit)

        return True

    def start(self, _file):
        print '==> Load previous database'

        self.meta_infos = self._deserialize(conf.META_PATH) or self._clearMeta()
        if self.meta_infos.get('last_time', None):
            self.current_analysis = self._deserialize(self.getDBFilename(self.meta_infos['last_time'])) or self._clearVisits()
        else:
            self._clearVisits()

        self.meta_infos['start_analysis_time'] = None

        self.cache_plugins = preloadPlugins(self.plugins, self)

        print '==> Analysing log'

        for l in _file:
            # print "line " + l

            groups = self.log_re.match(l)

            if groups:
                if not self._newHit(groups.groupdict()):
                    break
            else:
                print "No match for " + l
                #break

        if self.analyse_started:
            self._generateDayStats()
            self._generateMonthStats()
            del self.meta_infos['start_analysis_time']
            self._serialize(self.meta_infos, conf.META_PATH)
        else:
            print '==> Analyse not started : nothing to do'
            self._generateMonthStats()

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Intelligent Web Log Analyzer')

    parser.add_argument('-c', '--clean-output', dest='clean_output', action='store_true',
                        default=False,
                        help='Clean output before starting')

    parser.add_argument('-i', '--stdin', dest='stdin', action='store_true',
                        default=False,
                        help='Read data from stdin instead of conf.analyzed_filename')

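    # Typical invocations (assuming conf.py is filled in as described in the
    # header comment):
    #
    #     python iwla.py                                     # analyze conf.analyzed_filename
    #     cat /var/log/apache2/access.log | python iwla.py -i
    #     python iwla.py -c                                  # wipe DB_ROOT and DISPLAY_ROOT first
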
    args = parser.parse_args()

    if args.clean_output:
        if os.path.exists(conf.DB_ROOT): shutil.rmtree(conf.DB_ROOT)
        if os.path.exists(conf.DISPLAY_ROOT): shutil.rmtree(conf.DISPLAY_ROOT)

    iwla = IWLA()

    required_conf = ['analyzed_filename', 'domain_name']
    if not validConfRequirements(required_conf, iwla, 'Main Conf'):
        sys.exit(0)

    if args.stdin:
        iwla.start(sys.stdin)
    else:
        if not os.path.exists(conf.analyzed_filename):
            print 'No such file \'%s\'' % (conf.analyzed_filename)
            sys.exit(-1)
        with open(conf.analyzed_filename) as f:
            iwla.start(f)
