from urllib2 import urlopen
def get_file_by_url(url):
    """Download the file at *url* and return its contents decoded as UTF-8.

    Returns None implicitly if the download fails; the failure is
    reported on stdout rather than raised.
    """
    try:
        f = urlopen(url)
        return f.read().decode("UTF-8")
    except Exception:
        # Narrowed from a bare except, which would also swallow
        # KeyboardInterrupt and SystemExit.
        print("Problem getting file: ", url)
def write_data(f, data):
    """Write *data* to file object *f* as UTF-8 encoded bytes.

    Handles both interpreters: Python 3 uses the bytes constructor,
    Python 2 encodes the str directly.
    """
    encoded = bytes(data, "UTF-8") if PY3 else str(data).encode("UTF-8")
    f.write(encoded)
def list_dir_no_hidden(path):
    """Return the entries directly under *path*, skipping hidden files.

    Relies on glob's "*" pattern, which never matches dotfiles.
    """
    pattern = os.path.join(path, "*")
    return glob(pattern)
# Project Settings
BASEDIR_PATH = os.path.dirname(os.path.realpath(__file__))
"blacklistfile" : os.path.join(BASEDIR_PATH, "blacklist"),
"whitelistfile" : os.path.join(BASEDIR_PATH, "whitelist")}
def main():
    """Parse CLI options, build the unified hosts file, and offer to install it."""
    parser = argparse.ArgumentParser(description="Creates a unified hosts file from hosts stored in data subfolders.")
    parser.add_argument("--auto", "-a", dest="auto", default=False,
                        action="store_true", help="Run without prompting.")
    parser.add_argument("--backup", "-b", dest="backup", default=False,
                        action="store_true",
                        help="Backup the hosts files before they are overridden.")
    parser.add_argument("--replace", "-r", dest="replace", default=False,
                        action="store_true",
                        help="Replace your active hosts file with this new hosts file.")
    parser.add_argument("--flush-dns-cache", "-f", dest="flushdnscache",
                        default=False, action="store_true",
                        help="Attempt to flush DNS cache after replacing the hosts file.")
    # NOTE(review): options such as "outputsubfolder", "noupdate",
    # "extensions" and "ziphosts" are read below but not registered above --
    # presumably further add_argument calls exist elsewhere; verify.

    global settings

    options = vars(parser.parse_args())
    options["outputpath"] = os.path.join(BASEDIR_PATH,
                                         options["outputsubfolder"])
    options["freshen"] = not options["noupdate"]

    settings = {}
    settings.update(defaults)
    settings.update(options)

    settings["sources"] = list_dir_no_hidden(settings["datapath"])
    settings["extensionsources"] = list_dir_no_hidden(
        settings["extensionspath"])

    # All our extensions folders...
    settings["extensions"] = [os.path.basename(item) for item in
                              list_dir_no_hidden(settings["extensionspath"])]
    # ... intersected with the extensions passed-in as arguments, then sorted.
    settings["extensions"] = sorted(
        set(options["extensions"]).intersection(settings["extensions"]))

    with open(settings["readmedatafilename"], "r") as readme_file:
        settings["readmedata"] = json.load(readme_file)

    prompt_for_update()
    prompt_for_exclusions()

    merge_file = create_initial_file()
    remove_old_hosts_file()

    final_file = remove_dups_and_excl(merge_file)
    finalize_file(final_file)

    if settings["ziphosts"]:
        zip_file = zipfile.ZipFile(os.path.join(settings["outputsubfolder"],
                                                "hosts.zip"), mode='w')
        zip_file.write(os.path.join(settings["outputsubfolder"], "hosts"),
                       compress_type=zipfile.ZIP_DEFLATED, arcname='hosts')
        zip_file.close()

    update_readme_data()

    print_success("Success! The hosts file has been saved in folder " +
                  settings["outputsubfolder"] + "\nIt contains " +
                  "{:,}".format(settings["numberofrules"]) +
                  " unique entries.")

    prompt_for_move(final_file)
# Prompt the User
def prompt_for_update():
    """Ensure a local "hosts" file exists, then offer to refresh all sources."""
    # Create the hosts file if it doesn't exist.
    if not os.path.isfile(os.path.join(BASEDIR_PATH, "hosts")):
        try:
            open(os.path.join(BASEDIR_PATH, "hosts"), "w+").close()
        except (IOError, OSError):
            # Narrowed from a bare except: only file-creation failures
            # are expected here. Also fixed the message, which previously
            # rendered as "...folder,try creating..." (missing space).
            print_failure("ERROR: No 'hosts' file in the folder, "
                          "try creating one manually")

    if not settings["freshen"]:
        return

    prompt = "Do you want to update all data sources?"
    if settings["auto"] or query_yes_no(prompt):
        update_all_sources()
    elif not settings["auto"]:
        print("OK, we'll stick with what we've got locally.")
def prompt_for_exclusions():
    """Ask the user whether common domains should be excluded from blocking."""
    prompt = ("Do you want to exclude any domains?\n"
              "For example, hulu.com video streaming must be able to access "
              "its tracking and ad servers in order to play video.")

    # In auto mode we never prompt; only the whitelist applies.
    if settings["auto"]:
        return
    if query_yes_no(prompt):
        display_exclusion_options()
    else:
        print("OK, we'll only exclude domains in the whitelist.")
-def promptForFlushDnsCache():
+def prompt_for_flush_dns_cache():
if settings["flushdnscache"]:
flush_dns_cache()
flush_dns_cache()
-def promptForMove(finalFile):
+def prompt_for_move(final_file):
if settings["replace"] and not settings["skipstatichosts"]:
move_file = True
elif settings["auto"] or settings["skipstatichosts"]:
move_file = query_yes_no(prompt)
if move_file:
- move_hosts_file_into_place(finalFile)
- promptForFlushDnsCache()
+ move_hosts_file_into_place(final_file)
+ prompt_for_flush_dns_cache()
else:
return False
# End Prompt the User
# Exclusion logic
-def displayExclusionOptions():
- for exclusionOption in settings["commonexclusions"]:
- prompt = "Do you want to exclude the domain " + exclusionOption + " ?"
+def display_exclusion_options():
+ for exclusion_option in settings["commonexclusions"]:
+ prompt = "Do you want to exclude the domain " + exclusion_option + " ?"
if query_yes_no(prompt):
- excludeDomain(exclusionOption)
+ exclude_domain(exclusion_option)
else:
continue
"to exclude (e.g. facebook.com): ")
user_domain = raw_input(domain_prompt)
- if isValidDomainFormat(user_domain):
- excludeDomain(user_domain)
+ if is_valid_domain_format(user_domain):
+ exclude_domain(user_domain)
continue_prompt = "Do you have more domains you want to enter?"
if not query_yes_no(continue_prompt):
return
def exclude_domain(domain):
    """Compile an exclusion regex for *domain* and register it globally."""
    pattern = settings["exclusionpattern"] + domain
    settings["exclusionregexs"].append(re.compile(pattern))
def matches_exclusions(stripped_rule):
    """Return True if the domain in *stripped_rule* matches any exclusion regex.

    *stripped_rule* is expected in "<ip> <domain>" form (see strip_rule).
    """
    stripped_domain = stripped_rule.split()[1]
    # any() short-circuits on the first matching pattern; also renames the
    # leftover camelCase local to match the file's snake_case convention.
    return any(exclusion_regex.search(stripped_domain)
               for exclusion_regex in settings["exclusionregexs"])
# End Exclusion Logic
+
# Update Logic
def update_all_sources():
    """Fetch the latest copy of every data source and rewrite its hosts file."""
    # Update all hosts files regardless of folder depth.
    all_sources = recursive_glob("*", settings["sourcedatafilename"])

    for source in all_sources:
        # "with" guarantees the descriptor is closed even if json.load raises.
        with open(source, "r") as update_file:
            update_data = json.load(update_file)
        update_url = update_data["url"]

        print("Updating source " + os.path.dirname(
            source) + " from " + update_url)

        updated_file = get_file_by_url(update_url)

        try:
            # Get rid of carriage-return symbols.
            updated_file = updated_file.replace("\r", "")

            hosts_path = os.path.join(BASEDIR_PATH,
                                      os.path.dirname(source),
                                      settings["hostfilename"])
            with open(hosts_path, "wb") as hosts_file:
                write_data(hosts_file, updated_file)
        except Exception:
            # Download failed (updated_file is None) or the write failed;
            # skip this source and keep going. Narrowed from a bare except.
            print("Skipping.")
# End Update Logic
+
# File Logic
def create_initial_file():
    """Merge all source and extension hosts files into one temp file.

    Also records each source's metadata in settings["sourcesdata"].
    Returns the open NamedTemporaryFile holding the merged data.
    """
    merge_file = tempfile.NamedTemporaryFile()

    # Spin the sources for the base file.
    for source in recursive_glob(settings["datapath"],
                                 settings["hostfilename"]):
        with open(source, "r") as cur_file:
            write_data(merge_file, cur_file.read())

    for source in recursive_glob(settings["datapath"],
                                 settings["sourcedatafilename"]):
        # "with" replaces manual open/close so the file is closed
        # even if json.load raises.
        with open(source, "r") as update_file:
            update_data = json.load(update_file)
        settings["sourcesdata"].append(update_data)

    # Spin the sources for extensions to the base file.
    for source in settings["extensions"]:
        for filename in recursive_glob(os.path.join(
                settings["extensionspath"], source),
                settings["hostfilename"]):
            with open(filename, "r") as cur_file:
                write_data(merge_file, cur_file.read())

        for update_file_path in recursive_glob(os.path.join(
                settings["extensionspath"], source),
                settings["sourcedatafilename"]):
            with open(update_file_path, "r") as update_file:
                update_data = json.load(update_file)
            settings["sourcesdata"].append(update_data)

    # Append the user's personal blacklist, if present.
    if os.path.isfile(settings["blacklistfile"]):
        with open(settings["blacklistfile"], "r") as cur_file:
            write_data(merge_file, cur_file.read())

    return merge_file
-def removeDupsAndExcl(mergeFile):
- numberOfRules = settings["numberofrules"]
+
+def remove_dups_and_excl(merge_file):
+ number_of_rules = settings["numberofrules"]
if os.path.isfile(settings["whitelistfile"]):
with open(settings["whitelistfile"], "r") as ins:
for line in ins:
os.makedirs(settings["outputpath"])
# Another mode is required to read and write the file in Python 3
- finalFile = open(os.path.join(settings["outputpath"], "hosts"),
- "w+b" if PY3 else "w+")
+ final_file = open(os.path.join(settings["outputpath"], "hosts"),
+ "w+b" if PY3 else "w+")
- mergeFile.seek(0) # reset file pointer
- hostnames = set(["localhost", "localhost.localdomain", "local", "broadcasthost"])
+ merge_file.seek(0) # reset file pointer
+ hostnames = {"localhost", "localhost.localdomain",
+ "local", "broadcasthost"}
exclusions = settings["exclusions"]
- for line in mergeFile.readlines():
+
+ for line in merge_file.readlines():
write = "true"
+
# Explicit encoding
line = line.decode("UTF-8")
+
# replace tabs with space
line = line.replace("\t+", " ")
- # Trim trailing whitespace, periods -- (Issue #271 - https://github.com/StevenBlack/hosts/issues/271)
+
+ # see gh-271: trim trailing whitespace, periods
line = line.rstrip(' .') + "\n"
+
# Testing the first character doesn't require startswith
if line[0] == "#" or re.match(r'^\s*$', line[0]):
- writeData(finalFile, line)
+ write_data(final_file, line)
continue
if "::1" in line:
continue
- strippedRule = stripRule(line) #strip comments
- if not strippedRule or matchesExclusions(strippedRule):
+ stripped_rule = strip_rule(line) # strip comments
+ if not stripped_rule or matches_exclusions(stripped_rule):
continue
- hostname, normalizedRule = normalizeRule(strippedRule) # normalize rule
+
+ # Normalize rule
+ hostname, normalized_rule = normalize_rule(stripped_rule)
for exclude in exclusions:
if exclude in line:
write = "false"
break
- if normalizedRule and (hostname not in hostnames) and (write == "true"):
- writeData(finalFile, normalizedRule)
+
+ if (normalized_rule and (hostname not in hostnames)
+ and (write == "true")):
+ write_data(final_file, normalized_rule)
hostnames.add(hostname)
- numberOfRules += 1
+ number_of_rules += 1
- settings["numberofrules"] = numberOfRules
- mergeFile.close()
+ settings["numberofrules"] = number_of_rules
+ merge_file.close()
- return finalFile
+ return final_file
def normalize_rule(rule):
    """Standardize *rule* and extract the hostname it targets.

    Returns (hostname, normalized_line); (None, None) when the rule does
    not look like "<ip> <hostname> ...".
    """
    match = re.search(r'^[ \t]*(\d+\.\d+\.\d+\.\d+)\s+([\w\.-]+)(.*)', rule)
    if not match:
        print("==>%s<==" % rule)
        return None, None

    # Explicitly lowercase and trim the hostname.
    hostname = match.group(2).lower().strip()
    suffix = match.group(3)

    if suffix and settings["keepdomaincomments"]:
        # Add the suffix as a comment only, not as a separate host.
        return hostname, "%s %s #%s\n" % (settings["targetip"],
                                          hostname, suffix)
    return hostname, "%s %s\n" % (settings["targetip"], hostname)
def finalize_file(final_file):
    """Prepend the opening header to *final_file*, then close it."""
    write_opening_header(final_file)
    final_file.close()
# Some sources put comments around their rules; for accuracy we need to
# strip them. The comments are preserved in the output hosts file.
def strip_rule(line):
    """Reduce *line* to "<ip> <domain>", or "" if it is not a valid rule."""
    fields = line.split()
    if len(fields) < 2:
        # just return blank
        return ""
    return fields[0] + " " + fields[1]


def write_opening_header(final_file):
    """Insert the informational header block at the top of *final_file*."""
    final_file.seek(0)  # reset file pointer
    file_contents = final_file.read()  # save content
    final_file.seek(0)  # write at the top

    write_data(final_file, "# This hosts file is a merged collection "
               "of hosts from reputable sources,\n")
    write_data(final_file, "# with a dash of crowd sourcing via Github\n#\n")
    write_data(final_file, "# Date: " + time.strftime(
        "%B %d %Y", time.gmtime()) + "\n")
    if settings["extensions"]:
        write_data(final_file, "# Extensions added to this file: " + ", ".join(
            settings["extensions"]) + "\n")
    write_data(final_file, "# Number of unique domains: " + "{:,}\n#\n".format(
        settings["numberofrules"]))
    write_data(final_file, "# Fetch the latest version of this file: "
               "https://raw.githubusercontent.com/"
               "StevenBlack/hosts/master/" +
               os.path.join(settings["outputsubfolder"], "") + "hosts\n")
    write_data(final_file, "# Project home page: https://github.com/"
               "StevenBlack/hosts\n#\n")
    write_data(final_file, "# ==============================="
               "================================\n")
    write_data(final_file, "\n")

    if not settings["skipstatichosts"]:
        write_data(final_file, "127.0.0.1 localhost\n")
        write_data(final_file, "127.0.0.1 localhost.localdomain\n")
        write_data(final_file, "127.0.0.1 local\n")
        write_data(final_file, "255.255.255.255 broadcasthost\n")
        write_data(final_file, "::1 localhost\n")
        write_data(final_file, "fe80::1%lo0 localhost\n")
        write_data(final_file, "0.0.0.0 0.0.0.0\n")
        if platform.system() == "Linux":
            write_data(final_file, "127.0.1.1 " + socket.gethostname() + "\n")
            write_data(final_file, "127.0.0.53 " + socket.gethostname() + "\n")
        write_data(final_file, "\n")

    preamble = os.path.join(BASEDIR_PATH, "myhosts")
    if os.path.isfile(preamble):
        with open(preamble, "r") as f:
            write_data(final_file, f.read())

    final_file.write(file_contents)
def update_readme_data():
    """Record this run's stats under the active extensions key in the readme data file."""
    extensions_key = "base"
    if settings["extensions"]:
        extensions_key = "-".join(settings["extensions"])

    generation_data = {
        "location": os.path.join(settings["outputsubfolder"], ""),
        "entries": settings["numberofrules"],
        "sourcesdata": settings["sourcesdata"],
    }
    settings["readmedata"][extensions_key] = generation_data

    with open(settings["readmedatafilename"], "w") as readme_file:
        json.dump(settings["readmedata"], readme_file)
Move the newly-created hosts file into its correct location on the OS.
For UNIX systems, the hosts file is "etc/hosts." On Windows, it's
- "C:\Windows\system32\drivers\etc\hosts."
+ "C:\Windows\System32\drivers\etc\hosts."
For this move to work, you must have administrator privileges to do this.
On UNIX systems, this means having "sudo" access, and on Windows, it
Parameters
----------
- final_file : str
- The name of the newly-created hosts file to move.
+ final_file : file object
+ The newly-created hosts file to move.
"""
filename = os.path.abspath(final_file.name)
print("Moving the file requires administrative privileges. "
"You might need to enter your password.")
if subprocess.call(["/usr/bin/sudo", "cp", filename, "/etc/hosts"]):
- printFailure("Moving the file failed.")
+ print_failure("Moving the file failed.")
elif os.name == "nt":
print("Automatically moving the hosts file "
"in place is not yet supported.")
if platform.system() == "Darwin":
if subprocess.call(["/usr/bin/sudo", "killall",
"-HUP", "mDNSResponder"]):
- printFailure("Flushing the DNS cache failed.")
+ print_failure("Flushing the DNS cache failed.")
elif os.name == "nt":
print("Automatically flushing the DNS cache is not yet supported.")
print("Please copy and paste the command 'ipconfig /flushdns' in "
if subprocess.call(["/usr/bin/sudo", "/etc/rc.d/init.d/nscd",
"restart"]):
- printFailure("Flushing the DNS cache failed.")
+ print_failure("Flushing the DNS cache failed.")
else:
- printSuccess("Flushing DNS by restarting nscd succeeded")
+ print_success("Flushing DNS by restarting nscd succeeded")
if os.path.isfile("/usr/lib/systemd/system/NetworkManager.service"):
dns_cache_found = True
if subprocess.call(["/usr/bin/sudo", "/usr/bin/systemctl",
"restart", "NetworkManager.service"]):
- printFailure("Flushing the DNS cache failed.")
+ print_failure("Flushing the DNS cache failed.")
else:
- printSuccess("Flushing DNS by restarting "
- "NetworkManager succeeded")
+ print_success("Flushing DNS by restarting "
+ "NetworkManager succeeded")
if os.path.isfile("/usr/lib/systemd/system/wicd.service"):
dns_cache_found = True
if subprocess.call(["/usr/bin/sudo", "/usr/bin/systemctl",
"restart", "wicd.service"]):
- printFailure("Flushing the DNS cache failed.")
+ print_failure("Flushing the DNS cache failed.")
else:
- printSuccess("Flushing DNS by restarting wicd succeeded")
+ print_success("Flushing DNS by restarting wicd succeeded")
if os.path.isfile("/usr/lib/systemd/system/dnsmasq.service"):
dns_cache_found = True
if subprocess.call(["/usr/bin/sudo", "/usr/bin/systemctl",
"restart", "dnsmasq.service"]):
- printFailure("Flushing the DNS cache failed.")
+ print_failure("Flushing the DNS cache failed.")
else:
- printSuccess("Flushing DNS by restarting dnsmasq succeeded")
+ print_success("Flushing DNS by restarting dnsmasq succeeded")
if os.path.isfile("/usr/lib/systemd/system/networking.service"):
dns_cache_found = True
if subprocess.call(["/usr/bin/sudo", "/usr/bin/systemctl",
"restart", "networking.service"]):
- printFailure("Flushing the DNS cache failed.")
+ print_failure("Flushing the DNS cache failed.")
else:
- printSuccess("Flushing DNS by restarting "
- "networking.service succeeded")
+ print_success("Flushing DNS by restarting "
+ "networking.service succeeded")
if not dns_cache_found:
- printFailure("Unable to determine DNS management tool.")
+ print_failure("Unable to determine DNS management tool.")
# Hotfix since merging with an already existing
# hosts file leads to artifacts and duplicates
def remove_old_hosts_file():
    """Replace the previous output hosts file with an empty one.

    When settings["backup"] is set, a timestamped copy is saved first.
    """
    old_file_path = os.path.join(BASEDIR_PATH, "hosts")
    # Touch the file first so os.remove cannot fail on a missing file.
    open(old_file_path, "a").close()

    if settings["backup"]:
        stamp = time.strftime("%Y-%m-%d-%H-%M-%S")
        # Keep a dated copy of the list being replaced.
        backup_file_path = os.path.join(BASEDIR_PATH,
                                        "hosts-{}".format(stamp))
        shutil.copy(old_file_path, backup_file_path)

    os.remove(old_file_path)
    # Leave behind a fresh, empty hosts file.
    open(old_file_path, "a").close()
# End File Logic
+
# Helper Functions
def query_yes_no(question, default="yes"):
"""
reply = None
while not reply:
- sys.stdout.write(colorize(question, colors.PROMPT) + prompt)
+ sys.stdout.write(colorize(question, Colors.PROMPT) + prompt)
choice = raw_input().lower()
reply = None
elif choice in valid:
reply = valid[choice]
else:
- printFailure("Please respond with 'yes' or 'no' "
- "(or 'y' or 'n').\n")
+ print_failure("Please respond with 'yes' or 'no' "
+ "(or 'y' or 'n').\n")
return reply == "yes"
def is_valid_domain_format(domain):
    """Return True if *domain* looks like a bare domain name.

    Rejects empty input and strings that start with "www." or a URL
    scheme, printing guidance for the user in both cases.
    """
    if domain == "":
        print("You didn't enter a domain. Try again.")
        return False

    # Raw string avoids the invalid "\d" escape warning in non-raw literals.
    domain_regex = re.compile(r"www\d{0,3}[.]|https?")

    if domain_regex.match(domain):
        print("The domain " + domain +
              " is not valid. Do not include "
              "www.domain.com or http(s)://domain.com. Try again.")
        # Explicit False instead of the previous implicit None (both falsy).
        return False
    return True
+
# A version-independent glob( ... "/**/" ... )
def recursive_glob(stem, file_pattern):
    """Find every file under *stem* whose name matches *file_pattern*."""
    if sys.version_info >= (3, 5):
        # Python 3.5+ supports recursive "**" globbing natively.
        return glob(stem + "/**/" + file_pattern, recursive=True)

    # Fallback for older interpreters: walk the tree manually.
    if stem == "*":
        stem = "."
    return [os.path.join(root, name)
            for root, _dirs, names in os.walk(stem)
            for name in fnmatch.filter(names, file_pattern)]
# Colors
class Colors(object):
    """ANSI escape sequences used to colorize terminal output."""

    PROMPT = "\033[94m"   # bright blue
    SUCCESS = "\033[92m"  # bright green
    FAIL = "\033[91m"     # bright red
    ENDC = "\033[0m"      # reset all attributes


def colorize(text, color):
    """Return *text* wrapped in *color*, with a trailing reset code."""
    return "".join((color, text, Colors.ENDC))
+
+
def print_success(text):
    """Echo *text* to stdout in the success (green) color."""
    print(colorize(text, Colors.SUCCESS))


def print_failure(text):
    """Echo *text* to stdout in the failure (red) color."""
    print(colorize(text, Colors.FAIL))
# End Helper Functions
if __name__ == "__main__":