iwla Git Source Tree

Root/iwla.py

#!/usr/bin/env python

import os
import shutil
import sys
import re
import time
import pickle
import gzip
import importlib
import argparse
import logging
import gettext
from calendar import monthrange
from datetime import date

import default_conf as conf
import conf as _
conf.__dict__.update(_.__dict__)
del _

from iplugin import *
from display import *

#
# Main class IWLA
# Parses the log, computes statistics, calls plugins and produces the output
# For now, only HTTP logs are supported
#
# Plugin requirements:
#     None
#
# Conf values needed:
#     analyzed_filename
#     domain_name
#     locales_path
#     compress_output_files*
#     (an illustrative conf.py sketch follows this comment block)
#
# Output files:
#     DB_ROOT/meta.db
#     DB_ROOT/year/month/iwla.db
#     OUTPUT_ROOT/index.html
#     OUTPUT_ROOT/year/month/index.html
#
# Statistics creation:
#
# meta:
#     last_time
#     start_analysis_time
#     stats =>
#         year =>
#             month =>
#                 viewed_bandwidth
#                 not_viewed_bandwidth
#                 viewed_pages
#                 viewed_hits
#                 nb_visits
#                 nb_visitors
#
# month_stats:
#     viewed_bandwidth
#     not_viewed_bandwidth
#     viewed_pages
#     viewed_hits
#     nb_visits
#
# days_stats:
#     day =>
#         viewed_bandwidth
#         not_viewed_bandwidth
#         viewed_pages
#         viewed_hits
#         nb_visits
#         nb_visitors
#
# visits:
#     remote_addr =>
#         remote_addr
#         remote_ip
#         viewed_pages
#         viewed_hits
#         not_viewed_pages
#         not_viewed_hits
#         bandwidth
#         last_access
#         requests =>
#             [fields_from_format_log]
#             extract_request =>
#                 extract_uri
#                 extract_parameters*
#             extract_referer* =>
#                 extract_uri
#                 extract_parameters*
#         robot
#         hit_only
#         is_page
#
# valid_visitors:
#     visitors from month_stats that are neither robots nor hit-only
#     (hit-only visitors are kept only if conf.count_hit_only_visitors is set)
#
# Statistics update:
#     None
#
# Statistics deletion:
#     None
#

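# The conf values listed above are read from conf.py, which is merged over
# default_conf.py at import time (see the top of this file). A minimal,
# purely illustrative conf.py could look like the sketch below; the values
# are examples, not the project defaults, but every key is read somewhere
# in this file:
#
#   analyzed_filename = '/var/log/nginx/access.log'
#   domain_name = 'example.com'
#   locale = 'en'
#   locales_path = './locales'
#   compress_output_files = ['.html', '.css', '.js']
#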
class IWLA(object):

    ANALYSIS_CLASS = 'HTTP'
    API_VERSION = 1
    IWLA_VERSION = '0.1'

    def __init__(self, logLevel):
        self.meta_infos = {}
        self.analyse_started = False
        self.current_analysis = {}
        self.cache_plugins = {}
        self.display = DisplayHTMLBuild(self)
        self.valid_visitors = None

        self.log_format_extracted = re.sub(r'([^\$\w])', r'\\\g<1>', conf.log_format)
        self.log_format_extracted = re.sub(r'\$(\w+)', '(?P<\g<1>>.+)', self.log_format_extracted)
        self.http_request_extracted = re.compile(r'(?P<http_method>\S+) (?P<http_uri>\S+) (?P<http_version>\S+)')
        self.log_re = re.compile(self.log_format_extracted)
        self.uri_re = re.compile(r'(?P<extract_uri>[^\?]+)(\?(?P<extract_parameters>.+))?')
        self.domain_name_re = re.compile(r'.*%s' % conf.domain_name)
        self.plugins = [(conf.PRE_HOOK_DIRECTORY, conf.pre_analysis_hooks),
                        (conf.POST_HOOK_DIRECTORY, conf.post_analysis_hooks),
                        (conf.DISPLAY_HOOK_DIRECTORY, conf.display_hooks)]

        logging.basicConfig(format='%(name)s %(message)s', level=logLevel)
        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger.info('==> Start')
        try:
            t = gettext.translation('iwla', localedir=conf.locales_path, languages=[conf.locale], codeset='utf8')
            self.logger.info('\tUsing locale %s' % (conf.locale))
        except IOError:
            t = gettext.NullTranslations()
            self.logger.info('\tUsing default locale en_EN')
        self._ = t.ugettext

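    # How the log line regex above is built (illustrative only; the real value of
    # conf.log_format comes from default_conf.py / conf.py). Assuming a
    # combined-style format such as:
    #   log_format = '$server_name $remote_addr - $remote_user [$time_local] ' \
    #                '"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent"'
    # the first re.sub() escapes every character that is neither '$' nor a word
    # character, and the second turns each '$field' into a named group, giving roughly:
    #   (?P<server_name>.+)\ (?P<remote_addr>.+)\ \-\ (?P<remote_user>.+)\ \[(?P<time_local>.+)\]\ ...
    # so that log_re.match(line).groupdict() yields the per-field 'hit' dict used below.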
    def getVersion(self):
        return IWLA.IWLA_VERSION

    def getConfValue(self, key, default=None):
        if not key in dir(conf):
            return default
        else:
            return conf.__dict__[key]

    def _clearVisits(self):
        self.current_analysis = {
            'days_stats' : {},
            'month_stats' : {},
            'visits' : {}
        }
        self.valid_visitors = None
        return self.current_analysis

    def getDaysStats(self):
        return self.current_analysis['days_stats']

    def getMonthStats(self):
        return self.current_analysis['month_stats']

    def getCurrentVisists(self):
        return self.current_analysis['visits']

    def getValidVisitors(self):
        return self.valid_visitors

    def getDisplay(self):
        return self.display

    def getCurTime(self):
        return self.meta_infos['last_time']

    def getStartAnalysisTime(self):
        return self.meta_infos['start_analysis_time']

    def isValidForCurrentAnalysis(self, request):
        cur_time = self.meta_infos['start_analysis_time']
        # Analyse not started
        if not cur_time: return False
        return (time.mktime(cur_time) < time.mktime(request['time_decoded']))

    def hasBeenViewed(self, request):
        return int(request['status']) in conf.viewed_http_codes

    def getCurDisplayPath(self, filename):
        cur_time = self.meta_infos['last_time']
        return os.path.join(str(cur_time.tm_year), '%02d' % (cur_time.tm_mon), filename)

    def getResourcesPath(self):
        return conf.resources_path

    def getCSSPath(self):
        return conf.css_path

    def _clearMeta(self):
        self.meta_infos = {
            'last_time' : None,
            'start_analysis_time' : None
        }
        return self.meta_infos

    def _clearDisplay(self):
        self.display = DisplayHTMLBuild(self)
        return self.display

    def getDBFilename(self, time):
        return os.path.join(conf.DB_ROOT, str(time.tm_year), '%02d' % (time.tm_mon), conf.DB_FILENAME)

    def _serialize(self, obj, filename):
        base = os.path.dirname(filename)
        if not os.path.exists(base):
            os.makedirs(base)

        # TODO : remove return
        #return

        with open(filename + '.tmp', 'wb+') as f:
            pickle.dump(obj, f)
            f.seek(0)
            with gzip.open(filename, 'w') as fzip:
                fzip.write(f.read())
        os.remove(filename + '.tmp')

    def _deserialize(self, filename):
        if not os.path.exists(filename):
            return None

        with gzip.open(filename, 'r') as f:
            return pickle.load(f)
        return None

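    # Note on the on-disk format: each DB file written by _serialize() is a
    # gzip-compressed pickle stored under DB_ROOT/<year>/<month>/<DB_FILENAME>
    # (see getDBFilename() above). A saved month can therefore be inspected by
    # hand; the path below is an assumption, not a project default:
    #   import gzip, pickle
    #   with gzip.open('<DB_ROOT>/2014/12/iwla.db', 'r') as f:
    #       db = pickle.load(f)
    #   print db['month_stats']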
    def _callPlugins(self, target_root, *args):
        self.logger.info('==> Call plugins (%s)' % (target_root))
        for (root, plugins) in self.plugins:
            if root != target_root: continue
            for p in plugins:
                mod = self.cache_plugins.get(root + '.' + p, None)
                if mod:
                    self.logger.info('\t%s' % (p))
                    mod.hook(*args)

    def isPage(self, request):
        for e in conf.pages_extensions:
            if request.endswith(e):
                return True

        return False

    def _appendHit(self, hit):
        remote_addr = hit['remote_addr']

        if not remote_addr: return

        if not remote_addr in self.current_analysis['visits'].keys():
            self._createVisitor(hit)

        super_hit = self.current_analysis['visits'][remote_addr]
        super_hit['requests'].append(hit)
        super_hit['bandwidth'] += int(hit['body_bytes_sent'])
        super_hit['last_access'] = self.meta_infos['last_time']

        request = hit['extract_request']

        uri = request.get('extract_uri', request['http_uri'])

        hit['is_page'] = self.isPage(uri)

        if super_hit['robot'] or\
           not self.hasBeenViewed(hit):
            page_key = 'not_viewed_pages'
            hit_key = 'not_viewed_hits'
        else:
            page_key = 'viewed_pages'
            hit_key = 'viewed_hits'

        if hit['is_page']:
            super_hit[page_key] += 1
        else:
            super_hit[hit_key] += 1

    def _createVisitor(self, hit):
        super_hit = self.current_analysis['visits'][hit['remote_addr']] = {}
        super_hit['remote_addr'] = hit['remote_addr']
        super_hit['remote_ip'] = hit['remote_addr']
        super_hit['viewed_pages'] = 0
        super_hit['viewed_hits'] = 0
        super_hit['not_viewed_pages'] = 0
        super_hit['not_viewed_hits'] = 0
        super_hit['bandwidth'] = 0
        super_hit['last_access'] = self.meta_infos['last_time']
        super_hit['requests'] = []
        super_hit['robot'] = False
        super_hit['hit_only'] = 0

    def _decodeHTTPRequest(self, hit):
        if not 'request' in hit.keys(): return False

        groups = self.http_request_extracted.match(hit['request'])

        if groups:
            hit['extract_request'] = groups.groupdict()
            uri_groups = self.uri_re.match(hit['extract_request']['http_uri'])
            if uri_groups:
                d = uri_groups.groupdict()
                hit['extract_request']['extract_uri'] = d['extract_uri']
                if 'extract_parameters' in d.keys():
                    hit['extract_request']['extract_parameters'] = d['extract_parameters']
        else:
            self.logger.warning("Bad request extraction %s" % (hit['request']))
            return False

        if hit['http_referer']:
            referer_groups = self.uri_re.match(hit['http_referer'])
            if referer_groups:
                hit['extract_referer'] = referer_groups.groupdict()
        return True

    def _decodeTime(self, hit):
        try:
            hit['time_decoded'] = time.strptime(hit['time_local'], conf.time_format)
        except ValueError, e:
            if sys.version_info < (3, 2):
                # Try without UTC value at the end (%z not recognized)
                gmt_offset_str = hit['time_local'][-5:]
                gmt_offset_hours = int(gmt_offset_str[1:3])*60*60
                gmt_offset_minutes = int(gmt_offset_str[3:5])*60
                gmt_offset = gmt_offset_hours + gmt_offset_minutes
                hit['time_decoded'] = time.strptime(hit['time_local'][:-6], conf.time_format[:-3])
                if gmt_offset_str[0] == '+':
                    hit['time_decoded'] = time.localtime(time.mktime(hit['time_decoded'])+gmt_offset)
                else:
                    hit['time_decoded'] = time.localtime(time.mktime(hit['time_decoded'])-gmt_offset)
            else:
                raise e
        return hit['time_decoded']

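    # Example of what _decodeTime() handles (values are illustrative; the real
    # format string lives in conf.time_format): with time_format = '%d/%b/%Y:%H:%M:%S %z'
    # and a log field time_local = '25/Dec/2014:16:04:05 +0100', Python < 3.2 does not
    # accept '%z', so the code above strips the trailing ' +0100' / ' %z' pair, parses
    # the remainder, and then applies the +/-HHMM offset by hand via time.mktime/localtime.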
    def getDisplayIndex(self):
        cur_time = self.meta_infos['last_time']
        filename = self.getCurDisplayPath('index.html')

        return self.display.getPage(filename)

    def _generateDisplayDaysStats(self):
        cur_time = self.meta_infos['last_time']
        title = '%s %d/%02d' % (self._('Statistics'), cur_time.tm_year, cur_time.tm_mon)
        filename = self.getCurDisplayPath('index.html')
        self.logger.info('==> Generate display (%s)' % (filename))
        page = self.display.createPage(title, filename, conf.css_path)

        _, nb_month_days = monthrange(cur_time.tm_year, cur_time.tm_mon)
        days = self.display.createBlock(DisplayHTMLBlockTableWithGraph, self._('By day'), [self._('Day'), self._('Visits'), self._('Pages'), self._('Hits'), self._('Bandwidth'), self._('Not viewed Bandwidth')], None, nb_month_days, range(1,6))
        days.setColsCSSClass(['', 'iwla_visit', 'iwla_page', 'iwla_hit', 'iwla_bandwidth', 'iwla_bandwidth'])
        nb_visits = 0
        nb_days = 0
        for i in range(1, nb_month_days+1):
            day = '%d<br/>%s' % (i, time.strftime('%b', cur_time))
            full_day = '%02d %s %d' % (i, time.strftime('%b', cur_time), cur_time.tm_year)
            if i in self.current_analysis['days_stats'].keys():
                stats = self.current_analysis['days_stats'][i]
                row = [full_day, stats['nb_visits'], stats['viewed_pages'], stats['viewed_hits'],
                       stats['viewed_bandwidth'], stats['not_viewed_bandwidth']]
                nb_visits += stats['nb_visits']
                nb_days += 1
            else:
                row = [full_day, 0, 0, 0, 0, 0]
            days.appendRow(row)
            days.setCellValue(i-1, 4, bytesToStr(row[4]))
            days.setCellValue(i-1, 5, bytesToStr(row[5]))
            days.appendShortTitle(day)
            adate = date(cur_time.tm_year, cur_time.tm_mon, i)
            week_day = adate.weekday()
            if week_day == 5 or week_day == 6:
                days.setRowCSSClass(i-1, 'iwla_weekend')
            if adate == date.today():
                css = days.getCellCSSClass(i-1, 0)
                if css: css = '%s %s' % (css, 'iwla_curday')
                else: css = 'iwla_curday'
                days.setCellCSSClass(i-1, 0, css)

        stats = self.current_analysis['month_stats']

        row = [0, nb_visits, stats['viewed_pages'], stats['viewed_hits'], stats['viewed_bandwidth'], stats['not_viewed_bandwidth']]
        if nb_days:
            average_row = map(lambda(v): int(v/nb_days), row)
        else:
            average_row = map(lambda(v): 0, row)

        average_row[0] = self._('Average')
        average_row[4] = bytesToStr(average_row[4])
        average_row[5] = bytesToStr(average_row[5])
        days.appendRow(average_row)

        row[0] = self._('Total')
        row[4] = bytesToStr(row[4])
        row[5] = bytesToStr(row[5])
        days.appendRow(row)
        page.appendBlock(days)
        self.display.addPage(page)

    def _generateDisplayMonthStats(self, page, year, month_stats):
        cur_time = time.localtime()
        months_name = ['', self._('Jan'), self._('Feb'), self._('Mar'), self._('Apr'), self._('May'), self._('June'), self._('July'), self._('Aug'), self._('Sep'), self._('Oct'), self._('Nov'), self._('Dec')]
        title = '%s %d' % (self._('Summary'), year)
        cols = [self._('Month'), self._('Visitors'), self._('Visits'), self._('Pages'), self._('Hits'), self._('Bandwidth'), self._('Not viewed Bandwidth'), self._('Details')]
        graph_cols = range(1,7)
        months = self.display.createBlock(DisplayHTMLBlockTableWithGraph, title, cols, None, 12, graph_cols)
        months.setColsCSSClass(['', 'iwla_visitor', 'iwla_visit', 'iwla_page', 'iwla_hit', 'iwla_bandwidth', 'iwla_bandwidth', ''])
        total = [0] * len(cols)
        for i in range(1, 13):
            month = '%s<br/>%d' % (months_name[i], year)
            full_month = '%s %d' % (months_name[i], year)
            if i in month_stats.keys():
                stats = month_stats[i]
                link = '<a href="%d/%02d/index.html">%s</a>' % (year, i, self._('Details'))
                row = [full_month, stats['nb_visitors'], stats['nb_visits'], stats['viewed_pages'], stats['viewed_hits'],
                       stats['viewed_bandwidth'], stats['not_viewed_bandwidth'], link]
                for j in graph_cols:
                    total[j] += row[j]
            else:
                row = [full_month, 0, 0, 0, 0, 0, 0, '']
            months.appendRow(row)
            months.setCellValue(i-1, 5, bytesToStr(row[5]))
            months.setCellValue(i-1, 6, bytesToStr(row[6]))
            months.appendShortTitle(month)
            if year == cur_time.tm_year and i == cur_time.tm_mon:
                css = months.getCellCSSClass(i-1, 0)
                if css: css = '%s %s' % (css, 'iwla_curday')
                else: css = 'iwla_curday'
                months.setCellCSSClass(i-1, 0, css)

        total[0] = self._('Total')
        total[5] = bytesToStr(total[5])
        total[6] = bytesToStr(total[6])
        months.appendRow(total)
        page.appendBlock(months)

    def _generateDisplayWholeMonthStats(self):
        title = '%s %s' % (self._('Statistics for'), conf.domain_name)
        filename = 'index.html'

        self.logger.info('==> Generate main page (%s)' % (filename))

        page = self.display.createPage(title, filename, conf.css_path)

        last_update = '<b>%s</b> %s<br />' % (self._('Last update'), time.strftime('%02d %b %Y %H:%M', time.localtime()))
        page.appendBlock(self.display.createBlock(DisplayHTMLRaw, last_update))

        for year in sorted(self.meta_infos['stats'].keys(), reverse=True):
            self._generateDisplayMonthStats(page, year, self.meta_infos['stats'][year])

        self.display.addPage(page)

    def _compressFile(self, build_time, root, filename):
        path = os.path.join(root, filename)
        gz_path = path + '.gz'

        self.logger.debug('Compress %s => %s' % (path, gz_path))

        if not os.path.exists(gz_path) or\
           os.stat(path).st_mtime > build_time:
            with open(path, 'rb') as f_in:
                with gzip.open(gz_path, 'wb') as f_out:
                    f_out.write(f_in.read())

    def _compressFiles(self, build_time, root):
        if not conf.compress_output_files: return
        for rootdir, subdirs, files in os.walk(root, followlinks=True):
            for f in files:
                for ext in conf.compress_output_files:
                    if f.endswith(ext):
                        self._compressFile(build_time, rootdir, f)
                        break

    def _generateDisplay(self):
        self._generateDisplayDaysStats()
        self._callPlugins(conf.DISPLAY_HOOK_DIRECTORY)
        self._generateDisplayWholeMonthStats()
        build_time = time.localtime()
        self.display.build(conf.DISPLAY_ROOT)
        self._compressFiles(build_time, conf.DISPLAY_ROOT)

    def _createEmptyStats(self):
        stats = {}
        stats['viewed_bandwidth'] = 0
        stats['not_viewed_bandwidth'] = 0
        stats['viewed_pages'] = 0
        stats['viewed_hits'] = 0
        stats['nb_visits'] = 0

        return stats

    def _generateMonthStats(self):
        self._clearDisplay()

        visits = self.current_analysis['visits']

        stats = self._createEmptyStats()
        for (day, stat) in self.current_analysis['days_stats'].items():
            for k in stats.keys():
                stats[k] += stat[k]

        duplicated_stats = {k:v for (k,v) in stats.items()}

        cur_time = self.meta_infos['last_time']
        self.logger.info("== Stats for %d/%02d ==" % (cur_time.tm_year, cur_time.tm_mon))
        self.logger.info(stats)

        if not 'month_stats' in self.current_analysis.keys():
            self.current_analysis['month_stats'] = stats
        else:
            for (k,v) in stats.items():
                self.current_analysis['month_stats'][k] = v

        self.valid_visitors = {}
        for (k,v) in visits.items():
            if v['robot']: continue
            # Hit-only visitors (no viewed page) are only kept when
            # conf.count_hit_only_visitors is set
            if not conf.count_hit_only_visitors and\
               not v['viewed_pages']:
                continue
            self.valid_visitors[k] = v

        duplicated_stats['nb_visitors'] = stats['nb_visitors'] = len(self.valid_visitors.keys())

        self._callPlugins(conf.POST_HOOK_DIRECTORY)

        path = self.getDBFilename(cur_time)
        if os.path.exists(path):
            os.remove(path)

        self.logger.info("==> Serialize to %s" % (path))
        self._serialize(self.current_analysis, path)

        # Save month stats
        year = cur_time.tm_year
        month = cur_time.tm_mon
        if not 'stats' in self.meta_infos.keys():
            self.meta_infos['stats'] = {}
        if not year in self.meta_infos['stats'].keys():
            self.meta_infos['stats'][year] = {}
        self.meta_infos['stats'][year][month] = duplicated_stats

        self._generateDisplay()

    def _generateDayStats(self):
        visits = self.current_analysis['visits']
        cur_time = self.meta_infos['last_time']

        self._callPlugins(conf.PRE_HOOK_DIRECTORY)

        stats = self._createEmptyStats()

        for (k, super_hit) in visits.items():
            if super_hit['last_access'].tm_mday != cur_time.tm_mday:
                continue
            viewed_pages = False
            for hit in super_hit['requests'][::-1]:
                if hit['time_decoded'].tm_mday != cur_time.tm_mday:
                    break
                if super_hit['robot'] or\
                   not self.hasBeenViewed(hit):
                    stats['not_viewed_bandwidth'] += int(hit['body_bytes_sent'])
                    continue
                stats['viewed_bandwidth'] += int(hit['body_bytes_sent'])
                if hit['is_page']:
                    stats['viewed_pages'] += 1
                    viewed_pages = True
                else:
                    stats['viewed_hits'] += 1
            if (conf.count_hit_only_visitors or\
                viewed_pages) and\
               not super_hit['robot']:
                stats['nb_visits'] += 1

        self.logger.info("== Stats for %d/%02d/%02d ==" % (cur_time.tm_year, cur_time.tm_mon, cur_time.tm_mday))
        self.logger.info(stats)

        self.current_analysis['days_stats'][cur_time.tm_mday] = stats

    def _newHit(self, hit):
        if not self.domain_name_re.match(hit['server_name']):
            return False

        t = self._decodeTime(hit)

        cur_time = self.meta_infos['last_time']

        if cur_time == None:
            self.current_analysis = self._deserialize(self.getDBFilename(t)) or self._clearVisits()
            self.analyse_started = True
        else:
            if time.mktime(t) <= time.mktime(cur_time):
                return False
            self.analyse_started = True
            if cur_time.tm_mon != t.tm_mon:
                self._generateMonthStats()
                self.current_analysis = self._deserialize(self.getDBFilename(t)) or self._clearVisits()
            elif cur_time.tm_mday != t.tm_mday:
                self._generateDayStats()

        self.meta_infos['last_time'] = t

        if not self.meta_infos['start_analysis_time']:
            self.meta_infos['start_analysis_time'] = t

        if not self._decodeHTTPRequest(hit): return False

        for k in hit.keys():
            if hit[k] == '-' or hit[k] == '*':
                hit[k] = ''

        self._appendHit(hit)

        return True

    def start(self, _file):
        self.logger.info('==> Load previous database')

        self.meta_infos = self._deserialize(conf.META_PATH) or self._clearMeta()
        if self.meta_infos['last_time']:
            self.logger.info('Last time')
            self.logger.info(self.meta_infos['last_time'])
            self.current_analysis = self._deserialize(self.getDBFilename(self.meta_infos['last_time'])) or self._clearVisits()
        else:
            self._clearVisits()

        self.meta_infos['start_analysis_time'] = None

        self.cache_plugins = preloadPlugins(self.plugins, self)

        self.logger.info('==> Analysing log')

        for l in _file:
            # print "line " + l

            groups = self.log_re.match(l)

            if groups:
                self._newHit(groups.groupdict())
            else:
                self.logger.warning("No match for %s" % (l))
                #break

        if self.analyse_started:
            self._generateDayStats()
            self._generateMonthStats()
            del self.meta_infos['start_analysis_time']
            self._serialize(self.meta_infos, conf.META_PATH)
        else:
            self.logger.info('==> Analyse not started : nothing new')

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Intelligent Web Log Analyzer')

    parser.add_argument('-c', '--clean-output', dest='clean_output', action='store_true',
                        default=False,
                        help='Clean output before starting')

    parser.add_argument('-i', '--stdin', dest='stdin', action='store_true',
                        default=False,
                        help='Read data from stdin instead of conf.analyzed_filename')

    parser.add_argument('-f', '--file', dest='file',
                        help='Analyse this log file')

    parser.add_argument('-d', '--log-level', dest='loglevel',
                        default='INFO', type=str,
                        help='Loglevel in %s, default : %s' % (['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], 'INFO'))

    args = parser.parse_args()

    if args.clean_output:
        if os.path.exists(conf.DB_ROOT): shutil.rmtree(conf.DB_ROOT)
        if os.path.exists(conf.DISPLAY_ROOT): shutil.rmtree(conf.DISPLAY_ROOT)

    loglevel = getattr(logging, args.loglevel.upper(), None)
    if not isinstance(loglevel, int):
        raise ValueError('Invalid log level: %s' % (args.loglevel))

    iwla = IWLA(loglevel)

    required_conf = ['analyzed_filename', 'domain_name']
    if not validConfRequirements(required_conf, iwla, 'Main Conf'):
        sys.exit(0)

    if args.stdin:
        iwla.start(sys.stdin)
    else:
        filename = args.file or conf.analyzed_filename
        if not os.path.exists(filename):
            print 'No such file \'%s\'' % (filename)
            sys.exit(-1)
        with open(filename) as f:
            iwla.start(f)
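
# Illustrative invocations of the command-line entry point above (the log paths
# are examples, not project defaults; the flags are those defined by argparse):
#   python iwla.py                                            # analyse conf.analyzed_filename
#   python iwla.py -f /var/log/nginx/access.log -d DEBUG      # analyse a specific file, verbose
#   zcat /var/log/nginx/access.log.*.gz | python iwla.py -i   # read the log from stdin
#   python iwla.py -c                                         # wipe DB_ROOT and DISPLAY_ROOT first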
