Files
web-scraper/test_helpers.py

36 lines
1.1 KiB
Python

#!/usr/bin/env python
import unittest
from utils.helpers import (url_validation, standardise_base_url)
class TestUrls(unittest.TestCase):
    """Unit tests for the URL helper functions in ``utils.helpers``."""

    # Domain the crawler is scoped to; validation is relative to this host.
    base_url = "github.com"

    # (input, expected) pairs: bare hosts should gain a protocol prefix,
    # while an already-qualified URL must pass through unchanged.
    base_url_list = (
        ('eu.httpbin.org', 'http://eu.httpbin.org'),
        ('www.simonweald.com', 'https://www.simonweald.com'),
        ('http://www.github.com', 'http://www.github.com'),
    )

    # URLs that should all be accepted as local to ``base_url``.
    valid_urls = [
        "https://www.github.com",
        "http://www.github.com",
        "github.com",
        "/some/url/",
        "index.html",
    ]

    def test_url_standardisation(self):
        """Check that a URL's protocol is discovered when not provided."""
        for candidate, expected in self.base_url_list:
            self.assertEqual(standardise_base_url(candidate), expected)

    def test_url_validation(self):
        """Accept every URL that is local to the domain being crawled."""
        for candidate in self.valid_urls:
            self.assertTrue(url_validation(self.base_url, candidate))
# Allow running this module directly: hands control to the standard
# unittest CLI runner, which discovers and executes the TestUrls cases.
if __name__ == '__main__':
    unittest.main()