Compare commits

...

2 Commits

SHA1        Message                Date
273cf56a3b  add some basic tests   2018-09-11 13:42:15 +01:00
1af26f50f2  added a docstring      2018-09-11 13:42:02 +01:00
2 changed files with 16 additions and 15 deletions

@@ -1,37 +1,35 @@
 #!/usr/bin/env python

 import unittest

-from utils.helpers import (clean_base_url)
+from utils.helpers import (sanitise_url)


 class TestUrls(unittest.TestCase):
     base_url = "github.com"
     base_url_list = (('eu.httpbin.org', 'http://eu.httpbin.org'),
                      ('www.simonweald.com', 'http://www.simonweald.com'),
                      ('http://www.github.com/', 'http://www.github.com'),
                      ('https://www.github.com', 'https://www.github.com'))
     valid_urls = ["https://www.github.com", "http://www.github.com",
                   "github.com", "/some/url/", "index.html"]
     urls_to_clean = (('https://www.github.com/', 'https://www.github.com/'),
                      ('https://github.com/?foo=bar', 'https://github.com/'),
                      ('https://github.com/#anchor', 'https://github.com/'))

-    def test_clean_base_url(self):
+    def test_sanitise_base_url(self):
         '''
         Tests whether a URL's protocol can be discovered if not provided.
         '''
         for url, target in self.base_url_list:
-            result = clean_base_url(url)
+            result = sanitise_url(url, base_url=True)
             self.assertEqual(result, target)

-    # def test_url_validation(self):
-    #     '''
-    #     Passes when given a valid URL. A valid URL is qualified
-    #     by being local to the domain to be crawled.
-    #     '''
-    #     for url in self.valid_urls:
-    #         result = url_validation(self.base_url, url)
-    #         self.assertTrue(result)

+    def test_sanitise_url(self):
+        '''
+        Tests that query strings and anchors are stripped from a URL.
+        '''
+        for url, target in self.urls_to_clean:
+            result = sanitise_url(url)
+            self.assertEqual(result, target)

 if __name__ == '__main__':
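
The sanitise_url helper these tests exercise lives in utils.helpers and isn't part of this diff. A minimal sketch of the behaviour the assertions imply, assuming a urllib.parse-based approach (names and logic here are illustrative, not the project's actual implementation):

    from urllib.parse import urlparse

    def sanitise_url(url, base_url=False):
        '''Sketch: normalise a URL the way the tests above expect.'''
        if base_url:
            # Prepend a default scheme when none is present, then drop
            # any trailing slash so base URLs compare consistently.
            if not urlparse(url).scheme:
                url = 'http://' + url
            return url.rstrip('/')
        # Otherwise strip the query string and anchor, keeping the
        # scheme, host and path intact.
        parsed = urlparse(url)
        return '{}://{}{}'.format(parsed.scheme, parsed.netloc, parsed.path)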

@@ -140,6 +140,9 @@ class RobotsTxt(object):
     '''
     def __init__(self, base_url=None):
+        '''
+        Manually retrieve robots.txt to allow us to set the user-agent.
+        '''
         self.base_url = base_url
         self.headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0'}
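
The rest of RobotsTxt isn't visible in this hunk. A hypothetical sketch of the manual retrieval the new docstring describes, pairing the custom User-Agent with the standard-library parser (the fetch and can_crawl methods below are assumptions, not the project's code):

    import requests
    from urllib.robotparser import RobotFileParser

    class RobotsTxt(object):
        def __init__(self, base_url=None):
            # Assumes base_url already carries a scheme, e.g. after
            # passing through sanitise_url(url, base_url=True).
            self.base_url = base_url
            self.headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; '
                                          'rv:61.0) Gecko/20100101 Firefox/61.0'}
            self.parser = RobotFileParser()

        def fetch(self):
            # Retrieve robots.txt ourselves so the request carries our
            # User-Agent, then feed the body to the parser line by line.
            response = requests.get('{}/robots.txt'.format(self.base_url),
                                    headers=self.headers)
            self.parser.parse(response.text.splitlines())

        def can_crawl(self, url):
            # True if robots.txt permits this User-Agent to fetch url.
            return self.parser.can_fetch(self.headers['User-Agent'], url)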