url = b"www.google.com"
expected = "www.google.com"
- actual = get_file_by_url(url)
+ actual = get_file_by_url(url, delay=0)
self.assertEqual(actual, expected)
@mock.patch("updateHostsFile.urlopen", side_effect=mock_url_open_fail)
def test_read_url_fail(self, _):
url = b"www.google.com"
- self.assertIsNone(get_file_by_url(url))
+ self.assertIsNone(get_file_by_url(url, delay=0))
expected = "Problem getting file:"
output = sys.stdout.getvalue()
@mock.patch("updateHostsFile.urlopen", side_effect=mock_url_open_read_fail)
def test_read_url_read_fail(self, _):
url = b"www.google.com"
- self.assertIsNone(get_file_by_url(url))
+ self.assertIsNone(get_file_by_url(url, delay=0))
expected = "Problem getting file:"
output = sys.stdout.getvalue()
@mock.patch("updateHostsFile.urlopen", side_effect=mock_url_open_decode_fail)
def test_read_url_decode_fail(self, _):
url = b"www.google.com"
- self.assertIsNone(get_file_by_url(url))
+ self.assertIsNone(get_file_by_url(url, delay=0))
expected = "Problem getting file:"
output = sys.stdout.getvalue()
shutil.copyfile(example_file_path, file_path)
-def get_file_by_url(url):
+def get_file_by_url(url, retries=3, delay=10):
"""
Get a file data located at a particular URL.
format we have to encode or decode data before parsing it to UTF-8.
"""
-   try:
-       with urlopen(url) as f:
-           soup = BeautifulSoup(f.read(), "lxml").get_text()
-           return "\n".join(list(map(domain_to_idna, soup.split("\n"))))
-   except Exception:
-       print("Problem getting file: ", url)
+   while retries:
+       try:
+           with urlopen(url) as f:
+               soup = BeautifulSoup(f.read(), "lxml").get_text()
+               return "\n".join(list(map(domain_to_idna, soup.split("\n"))))
+       except Exception as e:
+           if "failure in name resolution" in str(e):
+               # No network: wait before the next attempt
+               # (assumes `import time` at module level, not shown in this diff).
+               print("No internet connection! Retrying in {} seconds".format(delay))
+               time.sleep(delay)
+           # Every failed attempt, DNS-related or not, uses up one retry.
+           retries -= 1
+   # print() returns None, so callers still receive None when all retries fail.
+   return print("Problem getting file: ", url)
def write_data(f, data):