Diffstat (limited to 'Lib/test/test_robotparser.py')
-rw-r--r--  Lib/test/test_robotparser.py  34
1 file changed, 18 insertions(+), 16 deletions(-)
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index b3d4a46056b..178761dd381 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -1,6 +1,9 @@
-import unittest, StringIO, robotparser
-from test import test_support
-from urllib2 import urlopen, HTTPError
+import io
+import unittest
+import urllib.robotparser
+from urllib.error import URLError, HTTPError
+from urllib.request import urlopen
+from test import support
class RobotTestCase(unittest.TestCase):
def __init__(self, index, parser, url, good, agent):
@@ -33,8 +36,8 @@ tests = unittest.TestSuite()
def RobotTest(index, robots_txt, good_urls, bad_urls,
agent="test_robotparser"):
- lines = StringIO.StringIO(robots_txt).readlines()
- parser = robotparser.RobotFileParser()
+ lines = io.StringIO(robots_txt).readlines()
+ parser = urllib.robotparser.RobotFileParser()
parser.parse(lines)
for url in good_urls:
tests.addTest(RobotTestCase(index, parser, url, 1, agent))
@@ -232,8 +235,8 @@ RobotTest(15, doc, good, bad)
class NetworkTestCase(unittest.TestCase):
def testPasswordProtectedSite(self):
- test_support.requires('network')
- with test_support.transient_internet('mueblesmoraleda.com'):
+ support.requires('network')
+ with support.transient_internet('mueblesmoraleda.com'):
url = 'http://mueblesmoraleda.com'
robots_url = url + "/robots.txt"
# First check the URL is usable for our purposes, since the
@@ -249,28 +252,27 @@ class NetworkTestCase(unittest.TestCase):
self.skipTest(
"%r should return a 401 or 403 HTTP error, not succeed"
% (robots_url))
- parser = robotparser.RobotFileParser()
+ parser = urllib.robotparser.RobotFileParser()
parser.set_url(url)
try:
parser.read()
- except IOError:
+ except URLError:
self.skipTest('%s is unavailable' % url)
self.assertEqual(parser.can_fetch("*", robots_url), False)
def testPythonOrg(self):
- test_support.requires('network')
- with test_support.transient_internet('www.python.org'):
- parser = robotparser.RobotFileParser(
+ support.requires('network')
+ with support.transient_internet('www.python.org'):
+ parser = urllib.robotparser.RobotFileParser(
"http://www.python.org/robots.txt")
parser.read()
self.assertTrue(
parser.can_fetch("*", "http://www.python.org/robots.txt"))
-
def test_main():
- test_support.run_unittest(tests)
- test_support.run_unittest(NetworkTestCase)
+ support.run_unittest(NetworkTestCase)
+ support.run_unittest(tests)
if __name__=='__main__':
- test_support.verbose = 1
+ support.verbose = 1
test_main()
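
For reference, a minimal sketch of the Python 3 urllib.robotparser API that this patch migrates the tests to. The robots.txt rules and the example.com URLs below are illustrative, not taken from the test suite:

import io
import urllib.robotparser
from urllib.error import URLError

# Parse rules supplied as an in-memory string, mirroring the RobotTest
# helper's switch from StringIO.StringIO to io.StringIO.
robots_txt = (
    "User-agent: *\n"
    "Disallow: /private/\n"
)
parser = urllib.robotparser.RobotFileParser()
parser.parse(io.StringIO(robots_txt).readlines())
assert parser.can_fetch("test_robotparser", "http://example.com/index.html")
assert not parser.can_fetch("test_robotparser", "http://example.com/private/page")

# When reading over the network, connection failures surface as
# urllib.error.URLError, which is why the network tests now catch
# URLError rather than urllib2-era IOError to skip gracefully.
parser = urllib.robotparser.RobotFileParser()
parser.set_url("http://example.com/robots.txt")
try:
    parser.read()
except URLError:
    pass  # treat the host as unavailable, as testPasswordProtectedSite does

Note that RobotFileParser.read() itself maps 401/403 responses to a disallow-all policy, which is what testPasswordProtectedSite asserts via can_fetch() returning False for a password-protected site.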