#!/usr/bin/env python2.7
import sys
import os
import re
from urlparse import urlparse
try:
|
2016-02-20 12:40:51 +01:00
|
|
|
import config
|
2016-02-03 21:31:33 +01:00
|
|
|
except ImportError:
|
2016-02-20 12:40:51 +01:00
|
|
|
print("Please create config.py using config.py.sample")
|
|
|
|
exit()
|
2016-02-20 14:46:12 +01:00
|
|
|
try:
|
|
|
|
import cdb
|
|
|
|
except ImportError:
|
|
|
|
print("Please install python-cdb from pypi or via package manager")
|
|
|
|
exit()
class PySquidBlacklists:
|
|
|
|
def __init__(self, config):
|
|
|
|
self.db_backend = config.db_backend
|
2016-02-20 14:59:34 +01:00
|
|
|
self.categories = config.categories
|
|
|
|
self.base_dir = config.base_dir
|
|
|
|
self.domain_files = [os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(self.base_dir)) for f in
|
2016-02-20 14:46:12 +01:00
|
|
|
fn if re.match(r"domains*", f)]
|
|
|
|
self.blacklist_files = self.make_list()
|
2016-02-20 14:59:34 +01:00
|
|
|
self.cache = self.make_db()
|
2016-02-20 12:40:51 +01:00
|
|
|
|
2016-02-20 14:46:12 +01:00
|
|
|
def make_list(self):
|
|
|
|
blacklists = []
|
|
|
|
for l in self.domain_files:
|
|
|
|
splitlist = l.split("/")
|
|
|
|
list_type = splitlist[len(splitlist) - 2]
|
|
|
|
blacklists.append([list_type, l])
|
|
|
|
return blacklists
|
2016-02-20 12:40:51 +01:00
|
|
|
|
2016-02-20 14:46:12 +01:00
|
|
|
def make_db(self):
|
|
|
|
lib = dict()
|
|
|
|
for bls in self.blacklist_files:
|
2016-02-20 14:59:34 +01:00
|
|
|
if self.db_backend == "ram":
|
|
|
|
if bls[0] in self.categories:
|
|
|
|
cache = dict()
|
|
|
|
f = open(bls[1], "r")
|
|
|
|
for l in f:
|
|
|
|
cache[l.strip("\n")] = True
|
|
|
|
lib[bls[0]] = cache
|
|
|
|
del cache
|
2016-02-20 14:46:12 +01:00
|
|
|
return lib
|
2016-02-20 12:40:51 +01:00
|
|
|
|
2016-02-20 14:46:12 +01:00
|
|
|
def compare(self, outline):
|
|
|
|
result = False
|
2016-02-20 14:59:34 +01:00
|
|
|
for blacklist in self.cache:
|
2016-02-20 14:46:12 +01:00
|
|
|
tmpline = outline
|
|
|
|
while not result and tmpline != "":
|
|
|
|
try:
|
2016-02-20 14:59:34 +01:00
|
|
|
result = self.cache[blacklist][tmpline]
|
2016-02-20 14:46:12 +01:00
|
|
|
pass
|
|
|
|
except KeyError:
|
|
|
|
pass
|
|
|
|
tmpline = tmpline.partition('.')[2]
|
|
|
|
return result
|
2016-01-25 14:07:49 +01:00
|
|
|
|
2016-02-20 14:46:12 +01:00
|
|
|
@staticmethod
|
|
|
|
def response(r):
|
|
|
|
sys.stdout.write("%s\n" % r)
|
|
|
|
sys.stdout.flush()
class PySquidBlacklistsImporter:
|
|
|
|
def __init__(self, conf):
|
|
|
|
self.test = True
|
|
|
|
self.db = conf.db_backend
bli = PySquidBlacklistsImporter(config)
|
|
|
|
bl = PySquidBlacklists(config)
|
2016-01-25 14:07:49 +01:00
|
|
|
while True:
|
2016-02-20 12:40:51 +01:00
|
|
|
try:
|
2016-02-20 14:46:12 +01:00
|
|
|
line = sys.stdin.readline().strip()
|
2016-02-20 15:41:30 +01:00
|
|
|
if line == "":
|
|
|
|
exit()
|
2016-02-20 14:46:12 +01:00
|
|
|
outline = urlparse(line).netloc
|
|
|
|
if line:
|
2016-02-20 15:41:30 +01:00
|
|
|
if bl.compare(outline):
|
|
|
|
bl.response("OK")
|
|
|
|
else:
|
|
|
|
bl.response("ERR")
|
|
|
|
except IOError:
|
|
|
|
pass
|