# coding: utf-8

# This file is part of the Adblock Plus web scripts,
# Copyright (C) 2006-2012 Eyeo GmbH
#
# Adblock Plus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# Adblock Plus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>.

import MySQLdb, os, re, subprocess
from sitescripts.utils import cached, get_config

@cached(600)
def _get_db():
  database = get_config().get("crawler", "database")
  dbuser = get_config().get("crawler", "dbuser")
  dbpasswd = get_config().get("crawler", "dbpassword")
  if os.name == "nt":
    return MySQLdb.connect(user=dbuser, passwd=dbpasswd, db=database,
                           use_unicode=True, charset="utf8", named_pipe=True)
  else:
    return MySQLdb.connect(user=dbuser, passwd=dbpasswd, db=database,
                           use_unicode=True, charset="utf8")

def _get_cursor():
  return _get_db().cursor(MySQLdb.cursors.DictCursor)

def _hg(args):
  return subprocess.Popen(["hg"] + args, stdout = subprocess.PIPE)

def _extract_sites(easylist_dir):
  os.chdir(easylist_dir)
  process = _hg(["log", "--template", "{desc}\n"])
  urls = set([])

  for line in process.stdout:
    match = re.search(r"\b(https?://\S*)", line)
    if not match:
      continue

    url = match.group(1).strip()
    urls.add(url)

  return urls

def _insert_sites(site_urls):
  cursor = _get_cursor()
  for url in site_urls:
    cursor.execute("INSERT INTO crawler_sites (url) VALUES (%s)", url)
Wladimir Palant, 2012/12/21 14:30:30:
Add _get_db().commit()? Otherwise this might bite.

Wladimir Palant, 2012/12/21 14:30:30:
Use INSERT IGNORE in case the URL is already in the database.
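To illustrate the two suggestions above, here is a minimal sketch of what _insert_sites could look like with INSERT IGNORE and an explicit commit applied. It is not the committed fix, and it assumes crawler_sites has a unique index on url (otherwise INSERT IGNORE would not prevent duplicates):

def _insert_sites(site_urls):
  cursor = _get_cursor()
  for url in site_urls:
    # INSERT IGNORE skips URLs that are already present, assuming a unique
    # index on crawler_sites.url.
    cursor.execute("INSERT IGNORE INTO crawler_sites (url) VALUES (%s)", (url,))
  # Commit explicitly so the inserted rows are not lost when the cached
  # connection goes away.
  _get_db().commit()

An alternative would be REPLACE INTO, but INSERT IGNORE avoids needlessly rewriting rows that already exist.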

if __name__ == "__main__":
  easylist_dir = get_config().get("crawler", "easylist_repository")
  site_urls = _extract_sites(easylist_dir)
  _insert_sites(site_urls)