Resolved bug in URL check

This commit is contained in:
Paul 2016-02-12 17:37:57 +01:00
parent 1088d74b46
commit 9611f8a7cc

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python2.7
import sys
import os
@ -37,12 +37,14 @@ def make_db(blacklist_files):
def compare(outline, blacklist_cache, blacklists):
    """Return the cached entry for *outline* (a domain name) from the first
    matching blacklist, or False if no blacklist matches.

    For each blacklist in *blacklists*, the domain is looked up as-is and
    then with its leftmost label progressively stripped
    (``a.b.c`` -> ``b.c`` -> ``c``), so a blacklisted parent domain also
    matches all of its subdomains.
    """
    result = False
    for blacklist in blacklists:
        # Work on a copy so every blacklist starts from the full domain.
        # (Stripping `outline` itself was the bug this commit fixed: after
        # the first blacklist the domain was already consumed, so later
        # blacklists were compared against a truncated or empty string.)
        tmpline = outline
        while not result and tmpline != "":
            try:
                result = blacklist_cache[blacklist][tmpline]
            except KeyError:
                # Not in this blacklist at this level; try the parent domain.
                pass
            # Drop the leftmost label: "www.example.com" -> "example.com".
            # partition('.')[2] is "" when no dot remains, ending the loop.
            tmpline = tmpline.partition('.')[2]
    return result
def squid_response(response):
@ -55,8 +57,8 @@ blacklist_files = make_list(domain_files)
blacklist_cache = make_db(blacklist_files)
while True:
l = sys.stdin.readline().strip()
outline = urlparse(l).netloc
line = sys.stdin.readline().strip()
outline = urlparse(line).netloc
if line:
if compare(outline,blacklist_cache,blacklists):
squid_response("OK")