Resolved bug in URL check

This commit is contained in:
Paul 2016-02-12 17:37:57 +01:00
parent 1088d74b46
commit 9611f8a7cc

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python2.7
 import sys
 import os
@@ -37,12 +37,14 @@ def make_db(blacklist_files):
 def compare(outline,blacklist_cache,blacklists):
     result = False
     for blacklist in blacklists:
-        while not result and outline != "":
+        tmpline = outline
+        while not result and tmpline != "":
             try:
-                result = blacklist_cache[blacklist][outline]
+                result = blacklist_cache[blacklist][tmpline]
+                pass
             except KeyError:
                 pass
-            outline = outline.partition('.')[2]
+            tmpline = tmpline.partition('.')[2]
     return result

 def squid_response(response):
@@ -55,8 +57,8 @@ blacklist_files = make_list(domain_files)
 blacklist_cache = make_db(blacklist_files)
 while True:
-    l = sys.stdin.readline().strip()
-    outline = urlparse(l).netloc
+    line = sys.stdin.readline().strip()
+    outline = urlparse(line).netloc
     if line:
         if compare(outline,blacklist_cache,blacklists):
             squid_response("OK")