From: Raymond Hettinger
Date: Sat, 13 Mar 2004 20:31:33 +0000 (+0000)
Subject: SF patch #911431: robot.txt must be robots.txt
X-Git-Tag: v2.3.4c1~111
X-Git-Url: http://git.ipfire.org/gitweb.cgi?a=commitdiff_plain;h=d0aa2457135a0a362808b40437b768a532033f9d;p=thirdparty%2FPython%2Fcpython.git

SF patch #911431: robot.txt must be robots.txt

(Contributed by George Yoshida.)
---

diff --git a/Lib/robotparser.py b/Lib/robotparser.py
index e2af545848b0..6b23188f196e 100644
--- a/Lib/robotparser.py
+++ b/Lib/robotparser.py
@@ -83,7 +83,7 @@ class RobotFileParser:
             self.entries.append(entry)
 
     def parse(self, lines):
-        """parse the input lines from a robot.txt file.
+        """parse the input lines from a robots.txt file.
            We allow that a user-agent: line is not preceded by
            one or more blank lines."""
         state = 0
@@ -148,7 +148,7 @@ class RobotFileParser:
 
     def can_fetch(self, useragent, url):
         """using the parsed robots.txt decide if useragent can fetch url"""
-        _debug("Checking robot.txt allowance for:\n user agent: %s\n url: %s" %
+        _debug("Checking robots.txt allowance for:\n user agent: %s\n url: %s" %
                (useragent, url))
         if self.disallow_all:
             return False
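
The patch touches only a docstring and a debug message; behaviour is
unchanged. For context, below is a minimal sketch of how the two touched
methods, parse() and can_fetch(), are exercised through the module's
public API. The user agent string and example.com URLs are illustrative,
not from the patch; the module was later renamed to urllib.robotparser
in Python 3.

    import robotparser

    rp = robotparser.RobotFileParser()
    # parse() takes the robots.txt body as a sequence of lines, so no
    # network access is needed here; read() would fetch self.url and
    # feed the lines to parse() itself.
    rp.parse([
        "User-agent: *",
        "Disallow: /private/",
    ])
    # can_fetch() consults the parsed rules; with the module-level
    # debug flag set, it prints the "Checking robots.txt allowance"
    # message corrected by this patch.
    print(rp.can_fetch("ExampleBot", "http://www.example.com/private/page.html"))  # False
    print(rp.can_fetch("ExampleBot", "http://www.example.com/index.html"))         # True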