LEFT | RIGHT |
---|---|
1 # coding: utf-8 | 1 # coding: utf-8 |
2 | 2 |
3 # This file is part of the Adblock Plus web scripts, | 3 # This file is part of the Adblock Plus web scripts, |
4 # Copyright (C) 2006-2014 Eyeo GmbH | 4 # Copyright (C) 2006-2014 Eyeo GmbH |
5 # | 5 # |
6 # Adblock Plus is free software: you can redistribute it and/or modify | 6 # Adblock Plus is free software: you can redistribute it and/or modify |
7 # it under the terms of the GNU General Public License version 3 as | 7 # it under the terms of the GNU General Public License version 3 as |
8 # published by the Free Software Foundation. | 8 # published by the Free Software Foundation. |
9 # | 9 # |
10 # Adblock Plus is distributed in the hope that it will be useful, | 10 # Adblock Plus is distributed in the hope that it will be useful, |
(...skipping 281 matching lines...) | |
292 diff = info["time"] - last_update | 292 diff = info["time"] - last_update |
293 if diff.days >= 365: | 293 if diff.days >= 365: |
294 info["downloadInterval"] = "%i year(s)" % (diff.days / 365) | 294 info["downloadInterval"] = "%i year(s)" % (diff.days / 365) |
295 elif diff.days >= 30: | 295 elif diff.days >= 30: |
296 info["downloadInterval"] = "%i month(s)" % (diff.days / 30) | 296 info["downloadInterval"] = "%i month(s)" % (diff.days / 30) |
297 elif diff.days >= 1: | 297 elif diff.days >= 1: |
298 info["downloadInterval"] = "%i day(s)" % diff.days | 298 info["downloadInterval"] = "%i day(s)" % diff.days |
299 else: | 299 else: |
300 info["downloadInterval"] = "%i hour(s)" % (diff.seconds / 3600) | 300 info["downloadInterval"] = "%i hour(s)" % (diff.seconds / 3600) |
301 | 301 |
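The interval bucketing in lines 292-300 above can be read as a small standalone helper. A minimal sketch follows (the helper name is made up; the script presumably runs on Python 2, where `/` on integers already floors, so `//` is used here to keep the result identical on Python 3 as well):

```python
from datetime import timedelta

def format_download_interval(diff):
    # Same bucketing as lines 292-300: years, then months, then days,
    # falling back to hours for anything under a day.
    if diff.days >= 365:
        return "%i year(s)" % (diff.days // 365)
    elif diff.days >= 30:
        return "%i month(s)" % (diff.days // 30)
    elif diff.days >= 1:
        return "%i day(s)" % diff.days
    return "%i hour(s)" % (diff.seconds // 3600)

# For example, format_download_interval(timedelta(days=45)) == "1 month(s)".
```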
302 diffdays = (info["time"].date() - last_update.date()).days | 302 if info["addonName"].startswith("adblockplus"): |
303 if diffdays == 0: | 303 diffdays = (info["time"].date() - last_update.date()).days |
304 info["previousDownload"] = "same day" | 304 if diffdays == 0: |
305 elif diffdays < 30: | 305 info["previousDownload"] = "same day" |
306 info["previousDownload"] = "%i day(s)" % diffdays | 306 elif diffdays < 30: |
307 elif diffdays < 365: | 307 info["previousDownload"] = "%i day(s)" % diffdays |
308 info["previousDownload"] = "%i month(s)" % (diffdays / 30) | 308 elif diffdays < 365: |
 | 309 info["previousDownload"] = "%i month(s)" % (diffdays / 30) |
 | 310 else: |
 | 311 info["previousDownload"] = "%i year(s)" % (diffdays / 365) |
309 else: | 312 else: |
310 info["previousDownload"] = "%i year(s)" % (diffdays / 365) | 313 info["previousDownload"] = "unknown" |
311 | 314 |
312 if last_update.year != info["time"].year or last_update.month != info["time"].month: | 315 if last_update.year != info["time"].year or last_update.month != info["time"].month: |
313 info["firstInMonth"] = info["firstInDay"] = True | 316 info["firstInMonth"] = info["firstInDay"] = True |
314 elif last_update.day != info["time"].day: | 317 elif last_update.day != info["time"].day: |
315 info["firstInDay"] = True | 318 info["firstInDay"] = True |
316 | 319 |
317 if get_week(last_update) != get_week(info["time"]): | 320 if get_week(last_update) != get_week(info["time"]): |
318 info["firstInWeek"] = True | 321 info["firstInWeek"] = True |
319 except ValueError: | 322 except ValueError: |
320 info["downloadInterval"] = "unknown" | 323 info["downloadInterval"] = "unknown" |
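get_week, used in lines 317/320 above, is defined elsewhere in the script and is not part of this hunk. A plausible stand-in, assuming weeks are keyed by their starting Monday (name taken from the patch, the body below is an assumption):

```python
from datetime import timedelta

def get_week(when):
    # Assumed implementation: map a datetime to the Monday that starts its
    # week, so two timestamps compare equal iff they fall in the same week.
    return when.date() - timedelta(days=when.weekday())
```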
(...skipping 69 matching lines...) | |
390 info["fullua"] = "%s %s" % (info["ua"], info["uaversion"]) | 393 info["fullua"] = "%s %s" % (info["ua"], info["uaversion"]) |
391 info["clientid"] = match.group(9) | 394 info["clientid"] = match.group(9) |
392 | 395 |
393 # Additional metadata depends on file type | 396 # Additional metadata depends on file type |
394 filename = os.path.basename(info["file"]) | 397 filename = os.path.basename(info["file"]) |
395 ext = os.path.splitext(filename)[1] | 398 ext = os.path.splitext(filename)[1] |
396 if ext == ".txt" or filename == "update.json" or filename == "notification.json": | 399 if ext == ".txt" or filename == "update.json" or filename == "notification.json": |
397 # Subscription downloads, libadblockplus update checks and notification | 400 # Subscription downloads, libadblockplus update checks and notification |
398 # checks are performed by the downloader | 401 # checks are performed by the downloader |
399 parse_downloader_query(info) | 402 parse_downloader_query(info) |
400 | |
401 if filename == "notification.json" and info["addonName"] not in ("adblockplus", "adblockpluschrome", "adblockplusopera", "adblockplussafari"): | |
Review comments on old line 401:
Felix Dahlke, 2014/05/22 07:35:21: I think we should filter this for all downloads re
Wladimir Palant, 2014/05/22 07:44:05: a) We really need to do this for all downloads, ot
Sebastian Noack, 2014/05/22 08:40:39: Done.
402 return None | |
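The removed check above only skipped notification.json requests from unknown add-ons; the right-hand side (lines 302-313) instead guards the previousDownload calculation for every download and falls back to "unknown". A condensed sketch of that behaviour, with the helper name and parameters invented for illustration:

```python
def previous_download(info, diffdays):
    # Mirrors right-hand lines 302-313: only Adblock Plus add-ons get a real
    # value, every other addonName is reported as "unknown".
    if not info["addonName"].startswith("adblockplus"):
        return "unknown"
    if diffdays == 0:
        return "same day"
    elif diffdays < 30:
        return "%i day(s)" % diffdays
    elif diffdays < 365:
        return "%i month(s)" % (diffdays // 30)
    return "%i year(s)" % (diffdays // 365)
```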
403 elif ext == ".tpl": | 403 elif ext == ".tpl": |
404 # MSIE TPL download, no additional data here | 404 # MSIE TPL download, no additional data here |
405 pass | 405 pass |
406 elif ext in (".xpi", ".crx", ".apk", ".msi", ".exe", ".safariextz"): | 406 elif ext in (".xpi", ".crx", ".apk", ".msi", ".exe", ".safariextz"): |
407 # Package download, might be an update | 407 # Package download, might be an update |
408 info["installType"] = parse_update_flag(info["query"]) | 408 info["installType"] = parse_update_flag(info["query"]) |
409 elif filename == "update.rdf": | 409 elif filename == "update.rdf": |
410 # Gecko update check or a legacy Android update check. The latter doesn't | 410 # Gecko update check or a legacy Android update check. The latter doesn't |
411 # have usable data anyway so trying the Chrome route won't do any harm. | 411 # have usable data anyway so trying the Chrome route won't do any harm. |
412 info["addonName"] = parse_addon_name(info["file"]) | 412 info["addonName"] = parse_addon_name(info["file"]) |
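parse_update_flag and parse_addon_name are defined outside this hunk; the names appear in the patch, but the bodies below are purely illustrative guesses, assuming the query string marks updates and the add-on name is encoded in the requested path:

```python
import posixpath

def parse_update_flag(query):
    # Assumption: the client marks update requests by putting "update" into
    # the query string; anything else counts as a fresh install.
    return "update" if query == "update" else "install"

def parse_addon_name(path):
    # Assumption: update.rdf is served from a per-add-on directory such as
    # /update/adblockplus/update.rdf, so the parent directory is the name.
    return posixpath.basename(posixpath.dirname(path))
```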
(...skipping 132 matching lines...) | |
545 parser.add_argument("mirror_name", nargs="?", help="Name of the mirror server that the file belongs to") | 545 parser.add_argument("mirror_name", nargs="?", help="Name of the mirror server that the file belongs to") |
546 parser.add_argument("server_type", nargs="?", help="Server type like download, update or subscription") | 546 parser.add_argument("server_type", nargs="?", help="Server type like download, update or subscription") |
547 parser.add_argument("log_file", nargs="?", help="Log file path, can be a local file path, http:// or ssh:// URL") | 547 parser.add_argument("log_file", nargs="?", help="Log file path, can be a local file path, http:// or ssh:// URL") |
548 args = parser.parse_args() | 548 args = parser.parse_args() |
549 | 549 |
550 if args.mirror_name and args.server_type and args.log_file: | 550 if args.mirror_name and args.server_type and args.log_file: |
551 sources = [(args.mirror_name, args.server_type, args.log_file)] | 551 sources = [(args.mirror_name, args.server_type, args.log_file)] |
552 else: | 552 else: |
553 sources = get_stats_files() | 553 sources = get_stats_files() |
554 parse_sources(sources, args.factor, args.verbose) | 554 parse_sources(sources, args.factor, args.verbose) |
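For reference, the two ways the argument parser above can drive parse_sources; the script name and paths below are invented, and only the three positional arguments are visible in this hunk:

```python
# Single explicit source - mirror name, server type and log location are
# passed positionally and wrapped into a one-element sources list:
#   python logprocessor.py mirror1 download ssh://stats@mirror1.example.com/var/log/access_log
#
# No positional arguments - the script falls back to get_stats_files() and
# processes every configured log source instead:
#   python logprocessor.py
```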