# This file is part of the Adblock Plus web scripts,
# Copyright (C) 2006-present eyeo GmbH
#
# Adblock Plus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# Adblock Plus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>.

import re
import sys
import os
from urlparse import urlparse
import eventlet
from eventlet.green import urllib2
from sitescripts.utils import get_config, get_template, setupStderr
import sitescripts.subscriptions.subscriptionParser as subscriptionParser


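# Try to read a single byte from the given URL. Returns (url, True) on
# success, (url, status code) on an HTTP error and (url, False) on any other
# failure such as a connection error or timeout.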
def checkURL(url):
    try:
        result = urllib2.urlopen(url, timeout=60).read(1)
        return (url, True)
    except urllib2.HTTPError as e:
        return (url, e.code)
    except:
        return (url, False)


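# Ask downforeveryoneorjustme.com whether the whole site is unreachable.
# Returns (site, False) only if the service reports the site as down; errors
# while querying the service are treated as the site being up.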
def checkSite(site):
    try:
        result = urllib2.urlopen('http://downforeveryoneorjustme.com/' + site, timeout=60).read()
        if re.search(r'\blooks down\b', result):
            return (site, False)
        else:
            return (site, True)
    except:
        return (site, True)


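# Check all links of all known subscriptions concurrently and return the
# per-subscription results needed to render the status page template.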
def checkSubscriptions():
    subscriptions = subscriptionParser.readSubscriptions().values()
    subscriptions.sort(key=lambda s: s.name.lower())

    # Collect every link to check: the metadata links of each subscription
    # plus the download URLs of all its variants.
    urls = {}
    sites = {}
    for subscription in subscriptions:
        for key in ('homepage', 'forum', 'blog', 'faq', 'contact', 'changelog', 'policy'):
            url = getattr(subscription, key)
            if url is not None:
                urls[url] = True
        for title, url, complete in subscription.variants:
            urls[url] = True

    # Check all URLs concurrently; for URLs that failed without an HTTP status
    # code, also check whether the whole site appears to be down.
    pool = eventlet.GreenPool()
    for url, result in pool.imap(checkURL, urls.iterkeys()):
        urls[url] = result
        if result is False:
            sites[urlparse(url).netloc] = True
    for site, result in pool.imap(checkSite, sites.iterkeys()):
        sites[site] = result

    # Build the template data: one entry per subscription, listing the check
    # result for each of its links.
    result = []
    for subscription in subscriptions:
        s = {'name': subscription.name, 'links': []}
        result.append(s)
        for key in ('homepage', 'forum', 'blog', 'faq', 'contact', 'changelog', 'policy'):
            url = getattr(subscription, key)
            if url is not None:
                site = urlparse(url).netloc
                s['links'].append({
                    'url': url,
                    'title': key[0].upper() + key[1:],
                    'result': urls[url],
                    'siteResult': site in sites and sites[site],
                })
        for title, url, complete in subscription.variants:
            site = urlparse(url).netloc
            s['links'].append({
                'url': url,
                'title': title,
                'result': urls[url],
                'siteResult': site in sites and sites[site],
            })
    return result


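# Entry point: run the checks and render the status page template into the
# configured output file.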
if __name__ == '__main__':
    setupStderr()

    subscriptions = checkSubscriptions()
    outputFile = get_config().get('subscriptions', 'statusPage')
    template = get_template(get_config().get('subscriptions', 'statusTemplate'))
    template.stream({'subscriptions': subscriptions}).dump(outputFile, encoding='utf-8')