| Left: | |
| Right: | |
| LEFT | RIGHT |
|---|---|
| 1 # coding: utf-8 | 1 # coding: utf-8 |
| 2 | 2 |
| 3 # This file is part of the Adblock Plus web scripts, | 3 # This file is part of the Adblock Plus web scripts, |
| 4 # Copyright (C) 2006-2014 Eyeo GmbH | 4 # Copyright (C) 2006-2014 Eyeo GmbH |
| 5 # | 5 # |
| 6 # Adblock Plus is free software: you can redistribute it and/or modify | 6 # Adblock Plus is free software: you can redistribute it and/or modify |
| 7 # it under the terms of the GNU General Public License version 3 as | 7 # it under the terms of the GNU General Public License version 3 as |
| 8 # published by the Free Software Foundation. | 8 # published by the Free Software Foundation. |
| 9 # | 9 # |
| 10 # Adblock Plus is distributed in the hope that it will be useful, | 10 # Adblock Plus is distributed in the hope that it will be useful, |
| 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of | 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 13 # GNU General Public License for more details. | 13 # GNU General Public License for more details. |
| 14 # | 14 # |
| 15 # You should have received a copy of the GNU General Public License | 15 # You should have received a copy of the GNU General Public License |
| 16 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. | 16 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. |
| 17 | 17 |
| 18 import re | 18 import re |
| 19 import time | 19 import time |
| 20 import posixpath | 20 import posixpath |
| 21 import urlparse | 21 import urlparse |
| 22 import multiprocessing | |
| 23 import threading | 22 import threading |
| 23 import traceback | |
| 24 from ConfigParser import SafeConfigParser | 24 from ConfigParser import SafeConfigParser |
| 25 from sitescripts.web import url_handler | 25 from sitescripts.web import url_handler |
| 26 from sitescripts.extensions.utils import getDownloadLinks | 26 from sitescripts.extensions.utils import getDownloadLinks |
| 27 | 27 |
| 28 links = {} | 28 links = {} |
| 29 UPDATE_INTERVAL = 10 * 60 # 10 minutes | 29 UPDATE_INTERVAL = 10 * 60 # 10 minutes |
| 30 | 30 |
| 31 @url_handler('/latest/') | 31 @url_handler('/latest/') |
| 32 def handle_request(environ, start_response): | 32 def handle_request(environ, start_response): |
| 33 request = urlparse.urlparse(environ.get('REQUEST_URI', '')) | 33 request = urlparse.urlparse(environ.get('REQUEST_URI', '')) |
| 34 basename = posixpath.splitext(posixpath.basename(request.path))[0] | 34 basename = posixpath.splitext(posixpath.basename(request.path))[0] |
| 35 if basename in links: | 35 if basename in links: |
| 36 start_response('302 Found', [('Location', links[basename].encode("utf-8"))]) | 36 start_response('302 Found', [('Location', links[basename].encode("utf-8"))]) |
| 37 else: | 37 else: |
| 38 start_response('404 Not Found', []) | 38 start_response('404 Not Found', []) |
| 39 return [] | 39 return [] |
| 40 | 40 |
| 41 def update_links(): | 41 def _get_links(): |
|
Sebastian Noack
2014/09/17 13:32:11
Since you are apparently fine with keeping a threa
Wladimir Palant
2014/09/17 17:45:44
That was the idea originally. However, https://sta
Sebastian Noack
2014/09/17 17:53:42
That is correct, but IMO not an issue here:
1. Th
Sebastian Noack
2014/09/17 18:13:53
Note that CPU time is distributed across all threa
| |
| 42 try: | |
| 43 pool = multiprocessing.Pool(1) | |
| 44 pool.apply_async(_update_links, callback=_set_links) | |
| 45 pool.close() | |
| 46 finally: | |
| 47 t = threading.Timer(UPDATE_INTERVAL, update_links) | |
| 48 t.daemon = True | |
| 49 t.start() | |
| 50 | |
| 51 def _update_links(): | |
| 52 parser = SafeConfigParser() | 42 parser = SafeConfigParser() |
| 53 getDownloadLinks(parser) | 43 getDownloadLinks(parser) |
| 54 result = {} | 44 result = {} |
| 55 for section in parser.sections(): | 45 for section in parser.sections(): |
| 56 result[section] = parser.get(section, "downloadURL") | 46 result[section] = parser.get(section, "downloadURL") |
| 57 return result | 47 return result |
| 58 | 48 |
| 59 def _set_links(newlinks): | 49 def _update_links(): |
| 60 global links | 50 global links |
| 61 links = newlinks | |
| 62 | 51 |
| 63 update_links() | 52 while True: |
| 53 try: | |
| 54 links = _get_links() | |
| 55 except: | |
| 56 traceback.print_exc() | |
| 57 time.sleep(UPDATE_INTERVAL) | |
| 58 | |
| 59 t = threading.Thread(target = _update_links) | |
| 60 t.daemon = True | |
| 61 t.start() | |
| LEFT | RIGHT |