Be more strict with robots: require at least 1 hit per viewed page

Gregory Soutade 2021-06-03 08:52:04 +02:00
parent 4cd7712201
commit 0c2ac431d1
1 changed file with 2 additions and 8 deletions

@@ -108,8 +108,8 @@ class IWLAPreAnalysisRobots(IPlugin):
             #    super_hit['robot'] = 1
             #    continue
-            # 2) pages without hit --> robot
-            if not super_hit['viewed_hits'][0] and super_hit['viewed_pages'][0]:
+            # 2) Less than 1 hit per page
+            if super_hit['viewed_pages'][0] and (super_hit['viewed_hits'][0] < super_hit['viewed_pages'][0]):
                 self._setRobot(k, super_hit)
                 continue
@@ -118,12 +118,6 @@ class IWLAPreAnalysisRobots(IPlugin):
                 self._setRobot(k, super_hit)
                 continue
-            # 4) pages without hit --> robot
-            if not super_hit['viewed_hits'][0] and super_hit['viewed_pages'][0]:
-                self.logger.debug(super_hit)
-                self._setRobot(k, super_hit)
-                continue
             not_found_pages = 0
             for hit in super_hit['requests']:
                 # 5) /robots.txt read
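
The change tightens the robot heuristic: instead of only flagging visits that viewed pages but produced no hits, any visit whose viewed pages outnumber its viewed hits is now flagged, which also makes the former check 4) redundant. Below is a minimal, self-contained sketch of that rule, not the plugin's actual code; it assumes, for illustration only, that counters are stored as one-element lists the way the diff suggests, and that 'viewed_hits' counts a visit's non-page requests (images, CSS, ...) separately from 'viewed_pages'.

# Illustrative sketch only -- names and data shape are assumptions,
# not iwla's real API.

def looks_like_robot(super_hit):
    """Return True when the visit has fewer hits than viewed pages."""
    viewed_pages = super_hit['viewed_pages'][0]
    viewed_hits = super_hit['viewed_hits'][0]
    # New rule: require at least 1 hit per viewed page.
    # Since 0 hits is always less than 1+ pages, this also covers the
    # old "pages without hit" case, so the separate check 4) was dropped.
    return bool(viewed_pages) and viewed_hits < viewed_pages

# A crawler typically fetches pages without their resources.
print(looks_like_robot({'viewed_pages': [3], 'viewed_hits': [1]}))   # True
# A browser loading pages also pulls images/CSS, so hits >= pages.
print(looks_like_robot({'viewed_pages': [3], 'viewed_hits': [10]}))  # False
# The case removed as check 4) (pages, zero hits) is still caught.
print(looks_like_robot({'viewed_pages': [2], 'viewed_hits': [0]}))   # True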