LEFT | RIGHT |
1 # coding: utf-8 | 1 # coding: utf-8 |
2 | 2 |
3 # This file is part of the Adblock Plus web scripts, | 3 # This file is part of the Adblock Plus web scripts, |
4 # Copyright (C) 2006-2014 Eyeo GmbH | 4 # Copyright (C) 2006-2014 Eyeo GmbH |
5 # | 5 # |
6 # Adblock Plus is free software: you can redistribute it and/or modify | 6 # Adblock Plus is free software: you can redistribute it and/or modify |
7 # it under the terms of the GNU General Public License version 3 as | 7 # it under the terms of the GNU General Public License version 3 as |
8 # published by the Free Software Foundation. | 8 # published by the Free Software Foundation. |
9 # | 9 # |
10 # Adblock Plus is distributed in the hope that it will be useful, | 10 # Adblock Plus is distributed in the hope that it will be useful, |
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of | 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of |
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
13 # GNU General Public License for more details. | 13 # GNU General Public License for more details. |
14 # | 14 # |
15 # You should have received a copy of the GNU General Public License | 15 # You should have received a copy of the GNU General Public License |
16 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. | 16 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. |
17 | 17 |
18 import re | 18 import re |
19 import time | 19 import time |
20 import posixpath | 20 import posixpath |
21 import urlparse | 21 import urlparse |
22 import multiprocessing | 22 import threading |
| 23 import traceback |
23 from ConfigParser import SafeConfigParser | 24 from ConfigParser import SafeConfigParser |
24 from sitescripts.web import url_handler | 25 from sitescripts.web import url_handler |
25 from sitescripts.extensions.utils import getDownloadLinks | 26 from sitescripts.extensions.utils import getDownloadLinks |
26 | 27 |
27 links = {} | 28 links = {} |
| 29 UPDATE_INTERVAL = 10 * 60 # 10 minutes |
28 | 30 |
29 @url_handler('/latest/') | 31 @url_handler('/latest/') |
30 def handle_request(environ, start_response): | 32 def handle_request(environ, start_response): |
31 global links | |
32 update_links() | |
33 | |
34 request = urlparse.urlparse(environ.get('REQUEST_URI', '')) | 33 request = urlparse.urlparse(environ.get('REQUEST_URI', '')) |
35 basename = posixpath.splitext(posixpath.basename(request.path))[0] | 34 basename = posixpath.splitext(posixpath.basename(request.path))[0] |
36 if basename in links: | 35 if basename in links: |
37 start_response('302 Found', [('Location', links[basename])]) | 36 start_response('302 Found', [('Location', links[basename].encode("utf-8"))]) |
38 else: | 37 else: |
39 start_response('404 Not Found', []) | 38 start_response('404 Not Found', []) |
40 return [] | 39 return [] |
41 | 40 |
42 UPDATE_INTERVAL = 10 * 60 # 10 minutes | 41 def _get_links(): |
43 update_queue = multiprocessing.Queue() | |
44 last_update = -float('Inf') | |
45 | |
46 def update_links(): | |
47 global UPDATE_INTERVAL, update_queue, links, last_update | |
48 while not update_queue.empty(): | |
49 links = update_queue.get() | |
50 | |
51 now = time.time() | |
52 if now - last_update > UPDATE_INTERVAL: | |
53 last_update = now | |
54 | |
55 process = multiprocessing.Process(target=_update_links, args=(update_queue,)) | |
56 process.daemon = True | |
57 process.start() | |
58 | |
59 def _update_links(queue): | |
60 parser = SafeConfigParser() | 42 parser = SafeConfigParser() |
61 getDownloadLinks(parser) | 43 getDownloadLinks(parser) |
62 result = {} | 44 result = {} |
63 for section in parser.sections(): | 45 for section in parser.sections(): |
64 result[section] = parser.get(section, "downloadURL") | 46 result[section] = parser.get(section, "downloadURL") |
65 queue.put(result) | 47 return result |
66 | 48 |
67 update_links() | 49 def _update_links(): |
| 50 global links |
| 51 |
| 52 while True: |
| 53 try: |
| 54 links = _get_links() |
| 55 except: |
| 56 traceback.print_exc() |
| 57 time.sleep(UPDATE_INTERVAL) |
| 58 |
| 59 t = threading.Thread(target = _update_links) |
| 60 t.daemon = True |
| 61 t.start() |
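
Note on the change: the RIGHT column drops the fork-on-demand multiprocessing scheme (a child process per refresh handing results back through a queue, triggered from the request handler) in favour of a single daemon thread that rebuilds the shared links table every UPDATE_INTERVAL seconds. A minimal, self-contained sketch of that background-refresh pattern, with a hypothetical fetch_links() standing in for getDownloadLinks() plus SafeConfigParser:

import threading
import time
import traceback

UPDATE_INTERVAL = 10 * 60   # seconds between refreshes
links = {}                  # shared table read by the request handler

def fetch_links():
    # Hypothetical stand-in for getDownloadLinks() + SafeConfigParser:
    # returns a {basename: downloadURL} mapping.
    return {"adblockplus": "https://example.com/adblockplus.xpi"}

def _update_links():
    global links
    while True:
        try:
            # Rebinding the module-level name is atomic, so the handler
            # never observes a half-built dictionary.
            links = fetch_links()
        except Exception:
            traceback.print_exc()
        time.sleep(UPDATE_INTERVAL)

updater = threading.Thread(target=_update_links)
updater.daemon = True   # dies with the process; no explicit shutdown needed
updater.start()

The other substantive edits on the RIGHT are wrapping the refresh in try/except with traceback.print_exc(), so one failed getDownloadLinks() call cannot kill the updater thread, and encoding the Location header value to UTF-8, presumably because downloadURL may be a unicode string while WSGI header values must be native byte strings under Python 2.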