From 73fd46d24e45c34f0fb87261e5471584a7c273df Mon Sep 17 00:00:00 2001
From: Jeremy Hylton
Date: Fri, 18 Jul 2008 20:59:44 +0000
Subject: Bug 3347: robotparser failed because it didn't convert bytes to
 string.

The solution is to convert bytes to text via utf-8. I'm not entirely sure
if this is safe, but it looks like robots.txt is expected to be ascii.
---
 Lib/urllib/robotparser.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/Lib/urllib/robotparser.py b/Lib/urllib/robotparser.py
index a91df8d815..c55fb5082f 100644
--- a/Lib/urllib/robotparser.py
+++ b/Lib/urllib/robotparser.py
@@ -60,7 +60,8 @@ class RobotFileParser:
             elif err.code >= 400:
                 self.allow_all = True
         else:
-            self.parse(f.read().splitlines())
+            raw = f.read()
+            self.parse(raw.decode("utf-8").splitlines())
 
     def _add_entry(self, entry):
         if "*" in entry.useragents:
@@ -123,7 +124,10 @@ class RobotFileParser:
             return True
         # search for given user agent matches
         # the first match counts
-        url = urllib.parse.quote(urllib.parse.urlparse(urllib.parse.unquote(url))[2]) or "/"
+        url = urllib.parse.quote(
+            urllib.parse.urlparse(urllib.parse.unquote(url))[2])
+        if not url:
+            url = "/"
         for entry in self.entries:
             if entry.applies_to(useragent):
                 return entry.allowance(url)
-- 
cgit v1.2.1
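
For context, a minimal sketch (not part of the patch) of the two behaviours the
diff touches. The sample robots.txt bytes and variable names below are
illustrative only, not taken from the commit.

    import urllib.parse

    # Why the decode is needed: in Python 3, urlopen().read() returns bytes,
    # and the str-based parsing in RobotFileParser.parse() cannot handle raw
    # bytes lines.
    raw = b"User-agent: *\nDisallow: /private/\n"   # what f.read() returns

    # Pre-patch: mixing bytes lines with str arguments fails.
    try:
        raw.splitlines()[0].split(":")               # bytes.split() with a str sep
    except TypeError as exc:
        print("bytes/str mismatch:", exc)

    # Post-patch: decode to text first, then split as parse() expects.
    lines = raw.decode("utf-8").splitlines()
    key, value = lines[0].split(":", 1)
    print(key.strip(), "->", value.strip())          # User-agent -> *

    # The second hunk only reflows the path-normalisation line in can_fetch();
    # behaviour is unchanged: an empty path still falls back to "/".
    url = "http://example.com"                       # no path component
    path = urllib.parse.quote(
        urllib.parse.urlparse(urllib.parse.unquote(url))[2])
    if not path:
        path = "/"
    print(path)                                      # /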