| Index: sitescripts/content_blocker_lists/bin/generate_lists.py |
| diff --git a/sitescripts/content_blocker_lists/bin/generate_lists.py b/sitescripts/content_blocker_lists/bin/generate_lists.py |
| index d86a52a95bc4edf0a14a017573c893f1f2c693bf..338e7ef3577fbaeb881468ffe9483016b1d492f4 100644 |
| --- a/sitescripts/content_blocker_lists/bin/generate_lists.py |
| +++ b/sitescripts/content_blocker_lists/bin/generate_lists.py |
| @@ -16,61 +16,82 @@ |
| # You should have received a copy of the GNU General Public License |
| # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. |
| +from contextlib import closing |
| +from datetime import datetime |
| +import json |
| import os |
| import subprocess |
| +import re |
| import urllib2 |
| from sitescripts.utils import get_config |
| -def _update_abp2blocklist(): |
| +config = dict(get_config().items("content_blocker_lists")) |
| + |
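The config dict above comes from the [content_blocker_lists] section of the sitescripts configuration. The real values are deployment-specific and not part of this patch; a hypothetical sketch of the keys the script reads further down (every path, URL and expiry value here is illustrative only):

    # Hypothetical illustration of what config ends up holding; the actual
    # values live in the sitescripts configuration file, which is not shown.
    config = {
        "abp2blocklist_url": "https://hg.adblockplus.org/abp2blocklist",    # assumed URL
        "abp2blocklist_path": "/var/lib/abp2blocklist",                     # assumed path
        "easylist_url": "https://easylist-downloads.adblockplus.org/easylist.txt",             # assumed
        "exceptionrules_url": "https://easylist-downloads.adblockplus.org/exceptionrules.txt",  # assumed
        "easylist_content_blocker_path": "/var/www/easylist_content_blocker.json",              # assumed
        "easylist_content_blocker_expires": "1 days",                       # assumed format
        "combined_content_blocker_path": "/var/www/combined_content_blocker.json",              # assumed
        "combined_content_blocker_expires": "1 days",                       # assumed format
    }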
| +def update_abp2blocklist(): |
| with open(os.devnull, "w") as devnull: |
| - config = get_config() |
| - abp2blocklist_path = config.get("content_blocker_lists", |
| - "abp2blocklist_path") |
| + abp2blocklist_path = config["abp2blocklist_path"] |
| if os.path.isdir(abp2blocklist_path): |
| subprocess.check_call(("hg", "pull", "-u", "-R", abp2blocklist_path), |
| stdout=devnull) |
| else: |
| - abp2blocklist_url = config.get("content_blocker_lists", |
| - "abp2blocklist_url") |
| - subprocess.check_call(("hg", "clone", abp2blocklist_url, |
| + subprocess.check_call(("hg", "clone", config["abp2blocklist_url"], |
| abp2blocklist_path), stdout=devnull) |
| subprocess.check_call(("npm", "install"), cwd=abp2blocklist_path, |
| stdout=devnull) |
| -def _download(url_key): |
| - url = get_config().get("content_blocker_lists", url_key) |
| - response = urllib2.urlopen(url) |
| - try: |
| - return response.read() |
| - finally: |
| - response.close() |
| +def download_filter_list(url): |
| + filter_list = {} |
| + with closing(urllib2.urlopen(url)) as response: |
| + filter_list["body"] = response.read() |
| + filter_list["header"] = parse_filter_list_header(filter_list["body"]) |
| + filter_list["header"]["url"] = url |
| + return filter_list |
| -def _convert_filter_list(sources, destination_path_key): |
| - config = get_config() |
| - destination_path = config.get("content_blocker_lists", destination_path_key) |
| - with open(destination_path, "wb") as destination_file: |
| - abp2blocklist_path = config.get("content_blocker_lists", |
| - "abp2blocklist_path") |
| - process = subprocess.Popen(("node", "abp2blocklist.js"), |
| - cwd=abp2blocklist_path, stdin=subprocess.PIPE, |
| - stdout=destination_file) |
| - try: |
| - for source in sources: |
| - print >>process.stdin, source |
| - finally: |
| - process.stdin.close() |
| - process.wait() |
| +def parse_filter_list_header(filter_list): |
| + body_start = re.search(r"^[^![]", filter_list, re.MULTILINE).start() |
| + field_re = re.compile(r"^!\s*([^:\s]+):\s*(.+)$", re.MULTILINE) |
| + return { match.group(1): match.group(2) |
| + for match in field_re.finditer(filter_list, 0, body_start) } |
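To make the two regular expressions above concrete: body_start is the offset of the first line starting with neither "!" nor "[", and field_re collects the "! Key: value" comment fields that appear before it. A minimal sketch against a made-up filter list snippet (download_filter_list stores this raw text under "body" and the parsed fields, plus the source "url", under "header"):

    # Sketch only; the sample text is made up and not a real filter list.
    sample = ("[Adblock Plus 2.0]\n"
              "! Version: 201511301200\n"
              "! Expires: 1 days\n"
              "||example.com^\n")
    header = parse_filter_list_header(sample)
    # header == {"Version": "201511301200", "Expires": "1 days"}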
| +def generate_metadata(filter_lists, expires): |
| + metadata = { |
    Sebastian Noack (2015/11/30 16:00:07): Nit: Use an OrderedDict to make sure that the meta… [comment truncated in this extract]
    kzar (2015/11/30 17:06:00): Done.
    (An OrderedDict sketch of this suggestion follows generate_metadata below.)
| + "sources": [], |
| + "version": datetime.utcnow().strftime("%Y%m%d%H%M"), |
| + "expires": expires |
| + } |
| + for filter_list in filter_lists: |
| + metadata["sources"].append({ k.lower(): filter_list["header"][k] |
| + for k in ["url", "Version"]}) |
| + return metadata |
| + |
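Sebastian Noack's nit above asks for an OrderedDict so the metadata keys keep a stable order in the output; kzar replied "Done", but the revised code is not part of this extract. A minimal sketch of what that variant could look like (note that the json.dump call in write_block_list would also have to drop sort_keys=True for insertion order to show up in the file):

    from collections import OrderedDict

    def generate_metadata(filter_lists, expires):
        # Insertion order is preserved, so "sources", "version" and "expires"
        # are written in this order when dumped without sort_keys=True.
        metadata = OrderedDict([
            ("sources", []),
            ("version", datetime.utcnow().strftime("%Y%m%d%H%M")),
            ("expires", expires),
        ])
        for filter_list in filter_lists:
            metadata["sources"].append({k.lower(): filter_list["header"][k]
                                        for k in ["url", "Version"]})
        return metadata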
| +def write_block_list(filter_lists, path, expires): |
| + block_list = generate_metadata(filter_lists, expires) |
| + process = subprocess.Popen(("node", "abp2blocklist.js"), |
| + cwd=config["abp2blocklist_path"], |
| + stdin=subprocess.PIPE, stdout=subprocess.PIPE) |
| + try: |
| + for filter_list in filter_lists: |
| + print >>process.stdin, filter_list["body"] |
| + block_list["rules"] = json.loads(process.communicate()[0]) |
    Sebastian Noack (2015/11/30 15:49:19): Yes, this works as abp2blocklist (currently) reads… [comment truncated in this extract]
    kzar (2015/11/30 17:06:00): I've not done much threaded programming in Python… [comment truncated in this extract]
    (A communicate()-based sketch of the concern discussed here follows write_block_list below.)
| + finally: |
| + process.stdin.close() |
| + process.wait() |
| if process.returncode: |
| raise Exception("abp2blocklist returned %s" % process.returncode) |
| + with open(path, "wb") as destination_file: |
| + json.dump(block_list, destination_file, |
| + sort_keys=True, indent=2, separators=(",", ": ")) |
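The truncated exchange above concerns writing the filter list bodies to process.stdin by hand and only then collecting stdout: if the child process started emitting output while the parent was still writing, both pipe buffers could fill and the two processes would deadlock. A sketch of the usual alternative, passing all input through communicate(), which services stdin and stdout concurrently (an illustrative variant, not the code under review):

    def convert_filter_lists(filter_lists):
        # Illustrative helper, not part of the patch: communicate() writes the
        # input and drains stdout at the same time, so full pipes cannot block it.
        process = subprocess.Popen(("node", "abp2blocklist.js"),
                                   cwd=config["abp2blocklist_path"],
                                   stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        stdout, _ = process.communicate(
            "\n".join(filter_list["body"] for filter_list in filter_lists))
        if process.returncode:
            raise Exception("abp2blocklist returned %s" % process.returncode)
        return json.loads(stdout)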
| if __name__ == "__main__": |
| - _update_abp2blocklist() |
| + update_abp2blocklist() |
| - easylist = _download("easylist_url") |
| - exceptionrules = _download("exceptionrules_url") |
| + easylist = download_filter_list(config["easylist_url"]) |
| + exceptionrules = download_filter_list(config["exceptionrules_url"]) |
| - _convert_filter_list([easylist], "easylist_content_blocker_path") |
| - _convert_filter_list([easylist, exceptionrules], |
| - "combined_content_blocker_path") |
| + write_block_list([easylist], |
| + config["easylist_content_blocker_path"], |
| + config["easylist_content_blocker_expires"]) |
| + write_block_list([easylist, exceptionrules], |
| + config["combined_content_blocker_path"], |
| + config["combined_content_blocker_expires"]) |
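For reference, the file written by write_block_list combines the metadata with the converted rules in a single JSON object. Roughly, just before json.dump, the block_list dict looks like this (all values below are made up; the rule objects are whatever abp2blocklist emits):

    # Illustrative shape only; versions, URLs and rules are invented.
    block_list = {
        "sources": [{"url": "https://easylist-downloads.adblockplus.org/easylist.txt",
                     "version": "201511301200"}],
        "version": "201511301210",
        "expires": "1 days",
        "rules": [{"trigger": {"url-filter": "example"},
                   "action": {"type": "block"}}],
    }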