Compare commits
2 Commits
c40c5cea50...273cf56a3b

| Author | SHA1 | Date |
|---|---|---|
|  | 273cf56a3b |  |
|  | 1af26f50f2 |  |
@@ -1,37 +1,35 @@
 #!/usr/bin/env python
 
 import unittest
 
-from utils.helpers import (clean_base_url)
+from utils.helpers import (sanitise_url)
 
 
 class TestUrls(unittest.TestCase):
 
-    base_url = "github.com"
 
     base_url_list = (('eu.httpbin.org', 'http://eu.httpbin.org'),
                      ('www.simonweald.com', 'http://www.simonweald.com'),
                      ('http://www.github.com/', 'http://www.github.com'),
                      ('https://www.github.com', 'https://www.github.com'))
 
-    valid_urls = ["https://www.github.com", "http://www.github.com",
-                  "github.com", "/some/url/", "index.html"]
+    urls_to_clean = (('https://www.github.com/', 'https://www.github.com/'),
+                     ('https://github.com/?foo=bar', 'https://github.com/'),
+                     ('https://github.com/#anchor', 'https://github.com/'))
 
-    def test_clean_base_url(self):
+    def test_sanitise_base_url(self):
         '''
         Tests whether a URL's protocol can be discovered if not provided.
         '''
         for url, target in self.base_url_list:
-            result = clean_base_url(url)
+            result = sanitise_url(url, base_url=True)
             self.assertEqual(result, target)
 
-    # def test_url_validation(self):
-    #     '''
-    #     Passes when given a valid URL. A valid URL is qualified
-    #     by being local to the domain to be crawled.
-    #     '''
-    #     for url in self.valid_urls:
-    #         result = url_validation(self.base_url, url)
-    #         self.assertTrue(result)
+    def test_sanitise_url(self):
+        '''
+        Tests whether a URL's protocol can be discovered if not provided.
+        '''
+        for url, target in self.urls_to_clean:
+            result = sanitise_url(url)
+            self.assertEqual(result, target)
 
 
 if __name__ == '__main__':
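The hunk above renames the helper from `clean_base_url` to `sanitise_url`, which now takes a `base_url` flag and is also expected to strip query strings and fragments. The helper's implementation is not part of this compare view, so the following is only a minimal sketch of a `utils/helpers.py` `sanitise_url` that would satisfy the updated tests; the use of `urllib.parse` and the exact normalisation rules are assumptions, not the author's code.

```python
# Hypothetical sketch of utils.helpers.sanitise_url, inferred from the
# test cases above. Not taken from the repository.
from urllib.parse import urlsplit, urlunsplit


def sanitise_url(url, base_url=False):
    '''
    Normalise a URL. With base_url=True, prepend http:// when no scheme
    is present and drop any trailing slash; otherwise strip the query
    string and fragment while leaving the path untouched.
    '''
    if base_url:
        if '://' not in url:
            url = 'http://' + url  # assume plain http when no scheme is given
        return url.rstrip('/')
    scheme, netloc, path, _query, _fragment = urlsplit(url)
    return urlunsplit((scheme, netloc, path, '', ''))  # discard ?query and #anchor
```

For example, `sanitise_url('eu.httpbin.org', base_url=True)` would return `'http://eu.httpbin.org'`, and `sanitise_url('https://github.com/?foo=bar')` would return `'https://github.com/'`, matching the expectations in `base_url_list` and `urls_to_clean`.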
@@ -140,6 +140,9 @@ class RobotsTxt(object):
     '''
 
     def __init__(self, base_url=None):
+        '''
+        Manually retrieve robots.txt to allow us to set the user-agent.
+        '''
         self.base_url = base_url
         self.headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0'}
 
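This second hunk only adds a docstring to `RobotsTxt.__init__` noting that robots.txt is retrieved manually so the request can carry the custom User-Agent held in `self.headers`. The fetching code itself sits outside the hunk, so the snippet below is a hypothetical illustration of that approach using `requests` plus the standard-library robots parser; everything beyond `base_url` and `headers` is an assumption.

```python
# Hypothetical illustration of fetching robots.txt with a custom User-Agent,
# in the spirit of the docstring added above. Not the class's actual code.
import requests
from urllib.robotparser import RobotFileParser

base_url = 'http://www.github.com'
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0'}

# Fetch the file ourselves so the request carries our User-Agent,
# then hand the body to the standard-library parser.
response = requests.get('{}/robots.txt'.format(base_url), headers=headers)
parser = RobotFileParser()
parser.parse(response.text.splitlines())
print(parser.can_fetch(headers['User-Agent'], '{}/some/path'.format(base_url)))
```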