Index: sitescripts/crawler/bin/extract_crawler_sites.py
===================================================================
new file mode 100644
--- /dev/null
+++ b/sitescripts/crawler/bin/extract_crawler_sites.py
@@ -0,0 +1,39 @@
+# coding: utf-8
+
+# This Source Code is subject to the terms of the Mozilla Public License
+# version 2.0 (the "License"). You can obtain a copy of the License at
+# http://mozilla.org/MPL/2.0/.
+
+import os, re, subprocess
+from sitescripts.utils import get_config
+
+def hg(args):
+  return subprocess.Popen(["hg"] + args, stdout = subprocess.PIPE)
+
+def extract_urls(filter_list_dir):
+  os.chdir(filter_list_dir)
+  process = hg(["log", "--template", "{desc}\n"])
+  urls = set([])
+
+  while True:
+    line = process.stdout.readline()
+    if line == "":
+      break
+
+    matches = re.match(r"[A-Z]:.*(https?://.*)", line)
Wladimir Palant
2012/09/14 14:42:14
What if the URL is followed by some additional information?
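One possible tightening in that direction, sketched here on the assumption that commit messages may carry extra text after the URL and that the URL itself contains no whitespace (neither is confirmed by the patch):

    # Capture only up to the first whitespace character, so a trailing note
    # after the URL is not swallowed into the captured group.
    matches = re.match(r"[A-Z]:.*(https?://\S+)", line)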
+    if not matches:
+      continue
+
+    url = matches.group(1).strip()
+    urls.add(url)
+
+  return urls
+
+def print_statements(urls):
+  for url in urls:
+    print "INSERT INTO crawler_sites (url) VALUES ('" + url + "');"
Wladimir Palant
2012/09/14 14:42:14
SQL injection? :-(
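One way this could be avoided, sketched under the assumption that the generated statements end up in a MySQL database reachable through a DB-API driver; the insert_urls helper and the connection object below are illustrative, not part of the patch, which only prints the statements:

    import MySQLdb

    def insert_urls(urls, connection):
      # Let the driver bind the values instead of concatenating them into
      # the SQL string, so quoting is handled safely.
      cursor = connection.cursor()
      cursor.executemany("INSERT INTO crawler_sites (url) VALUES (%s)",
                         [(url,) for url in urls])
      connection.commit()

If the script has to keep emitting plain SQL text instead, the URL would at least need proper escaping before being spliced into the statement.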
+
+if __name__ == "__main__":
+  filter_list_dir = get_config().get("crawler", "filter_list_repository")
+  urls = extract_urls(filter_list_dir)
+  print_statements(urls)