| Left: | ||
| Right: |
| LEFT | RIGHT |
|---|---|
| 1 # This file is part of the Adblock Plus web scripts, | 1 # This file is part of the Adblock Plus web scripts, |
| 2 # Copyright (C) 2006-present eyeo GmbH | 2 # Copyright (C) 2006-present eyeo GmbH |
| 3 # | 3 # |
| 4 # Adblock Plus is free software: you can redistribute it and/or modify | 4 # Adblock Plus is free software: you can redistribute it and/or modify |
| 5 # it under the terms of the GNU General Public License version 3 as | 5 # it under the terms of the GNU General Public License version 3 as |
| 6 # published by the Free Software Foundation. | 6 # published by the Free Software Foundation. |
| 7 # | 7 # |
| 8 # Adblock Plus is distributed in the hope that it will be useful, | 8 # Adblock Plus is distributed in the hope that it will be useful, |
| 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of | 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 11 # GNU General Public License for more details. | 11 # GNU General Public License for more details. |
| 12 # | 12 # |
| 13 # You should have received a copy of the GNU General Public License | 13 # You should have received a copy of the GNU General Public License |
| 14 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. | 14 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. |
| 15 | 15 |
| 16 """ | 16 """ |
| 17 | 17 |
| 18 Nightly builds generation script | 18 Nightly builds generation script |
| 19 ================================ | 19 ================================ |
| 20 | 20 |
| 21 This script generates nightly builds of extensions, together | 21 This script generates nightly builds of extensions, together |
| 22 with changelogs and documentation. | 22 with changelogs and documentation. |
| 23 | 23 |
| 24 """ | 24 """ |
| 25 | 25 |
| 26 import argparse | 26 import argparse |
| 27 import ConfigParser | 27 import ConfigParser |
| 28 import binascii | 28 import binascii |
| 29 import base64 | 29 import base64 |
| 30 import datetime | |
| 31 import hashlib | 30 import hashlib |
| 32 import hmac | 31 import hmac |
| 33 import json | 32 import json |
| 34 import logging | 33 import logging |
| 35 import os | 34 import os |
| 36 import pipes | 35 import pipes |
| 37 import random | |
| 38 import shutil | 36 import shutil |
| 39 import struct | 37 import struct |
| 40 import subprocess | 38 import subprocess |
| 41 import sys | 39 import sys |
| 42 import tempfile | 40 import tempfile |
| 43 import time | 41 import time |
| 44 import uuid | 42 import uuid |
| 45 from urllib import urlencode | 43 from urllib import urlencode |
| 46 import urllib2 | 44 import urllib2 |
| 47 import urlparse | 45 import urlparse |
| 48 import zipfile | 46 import zipfile |
| 49 import contextlib | 47 import contextlib |
| 48 from xml.dom.minidom import parse as parseXml | |
| 50 | 49 |
| 51 from Crypto.PublicKey import RSA | 50 from Crypto.PublicKey import RSA |
| 52 from Crypto.Signature import PKCS1_v1_5 | 51 from Crypto.Signature import PKCS1_v1_5 |
| 53 import Crypto.Hash.SHA256 | 52 import Crypto.Hash.SHA256 |
| 54 | 53 |
| 55 from xml.dom.minidom import parse as parseXml | |
| 56 | |
| 57 from sitescripts.extensions.utils import ( | 54 from sitescripts.extensions.utils import ( |
| 58 compareVersions, Configuration, | 55 compareVersions, Configuration, |
| 59 writeAndroidUpdateManifest | 56 writeAndroidUpdateManifest, |
| 60 ) | 57 ) |
| 61 from sitescripts.utils import get_config, get_template | 58 from sitescripts.utils import get_config, get_template |
| 62 | 59 |
| 63 MAX_BUILDS = 50 | 60 MAX_BUILDS = 50 |
| 64 | 61 |
| 65 | 62 |
| 66 # Google and Microsoft APIs use HTTP error codes with error message in | 63 # Google and Microsoft APIs use HTTP error codes with error message in |
| 67 # body. So we add the response body to the HTTPError to get more | 64 # body. So we add the response body to the HTTPError to get more |
| 68 # meaningful error messages. | 65 # meaningful error messages. |
| 69 class HTTPErrorBodyHandler(urllib2.HTTPDefaultErrorHandler): | 66 class HTTPErrorBodyHandler(urllib2.HTTPDefaultErrorHandler): |
| (...skipping 28 matching lines...) Expand all Loading... | |
| 98 | 95 |
| 99 def hasChanges(self): | 96 def hasChanges(self): |
| 100 return self.revision != self.previousRevision | 97 return self.revision != self.previousRevision |
| 101 | 98 |
| 102 def getCurrentRevision(self): | 99 def getCurrentRevision(self): |
| 103 """ | 100 """ |
| 104 retrieves the current revision ID from the repository | 101 retrieves the current revision ID from the repository |
| 105 """ | 102 """ |
| 106 command = [ | 103 command = [ |
| 107 'hg', 'id', '-i', '-r', self.config.revision, '--config', | 104 'hg', 'id', '-i', '-r', self.config.revision, '--config', |
| 108 'defaults.id=', self.config.repository | 105 'defaults.id=', self.config.repository, |
| 109 ] | 106 ] |
| 110 return subprocess.check_output(command).strip() | 107 return subprocess.check_output(command).strip() |
| 111 | 108 |
| 112 def getCurrentBuild(self): | 109 def getCurrentBuild(self): |
| 113 """ | 110 """ |
| 114 calculates the (typically numerical) build ID for the current build | 111 calculates the (typically numerical) build ID for the current build |
| 115 """ | 112 """ |
| 116 command = ['hg', 'id', '-n', '--config', 'defaults.id=', self.tempdir] | 113 command = ['hg', 'id', '-n', '--config', 'defaults.id=', self.tempdir] |
| 117 build = subprocess.check_output(command).strip() | 114 build = subprocess.check_output(command).strip() |
| 118 return build | 115 return build |
| 119 | 116 |
| 120 def getChanges(self): | 117 def getChanges(self): |
| 121 """ | 118 """ |
| 122 retrieve changes between the current and previous ("first") revision | 119 retrieve changes between the current and previous ("first") revision |
| 123 """ | 120 """ |
| 124 command = [ | 121 command = [ |
| 125 'hg', 'log', '-R', self.tempdir, '-r', | 122 'hg', 'log', '-R', self.tempdir, '-r', |
| 126 'reverse(ancestors({}))'.format(self.config.revision), '-l', '50', | 123 'reverse(ancestors({}))'.format(self.config.revision), '-l', '50', |
| 127 '--encoding', 'utf-8', '--template', | 124 '--encoding', 'utf-8', '--template', |
| 128 '{date|isodate}\\0{author|person}\\0{rev}\\0{desc}\\0\\0', | 125 '{date|isodate}\\0{author|person}\\0{rev}\\0{desc}\\0\\0', |
| 129 '--config', 'defaults.log=' | 126 '--config', 'defaults.log=', |
| 130 ] | 127 ] |
| 131 result = subprocess.check_output(command).decode('utf-8') | 128 result = subprocess.check_output(command).decode('utf-8') |
| 132 | 129 |
| 133 for change in result.split('\x00\x00'): | 130 for change in result.split('\x00\x00'): |
| 134 if change: | 131 if change: |
| 135 date, author, revision, description = change.split('\x00') | 132 date, author, revision, description = change.split('\x00') |
| 136 yield {'date': date, 'author': author, 'revision': revision, 'description': description} | 133 yield {'date': date, 'author': author, 'revision': revision, 'description': description} |
| 137 | 134 |
| 138 def copyRepository(self): | 135 def copyRepository(self): |
| 139 """ | 136 """ |
| (...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 281 os.makedirs(baseDir) | 278 os.makedirs(baseDir) |
| 282 | 279 |
| 283 # ABP for Android used to have its own update manifest format. We need to | 280 # ABP for Android used to have its own update manifest format. We need to |
| 284 # generate both that and the new one in the libadblockplus format as long | 281 # generate both that and the new one in the libadblockplus format as long |
| 285 # as a significant amount of users is on an old version. | 282 # as a significant amount of users is on an old version. |
| 286 if self.config.type == 'android': | 283 if self.config.type == 'android': |
| 287 newManifestPath = os.path.join(baseDir, 'update.json') | 284 newManifestPath = os.path.join(baseDir, 'update.json') |
| 288 writeAndroidUpdateManifest(newManifestPath, [{ | 285 writeAndroidUpdateManifest(newManifestPath, [{ |
| 289 'basename': self.basename, | 286 'basename': self.basename, |
| 290 'version': self.version, | 287 'version': self.version, |
| 291 'updateURL': self.updateURL | 288 'updateURL': self.updateURL, |
| 292 }]) | 289 }]) |
| 293 | 290 |
| 294 template = get_template(get_config().get('extensions', templateName), | 291 template = get_template(get_config().get('extensions', templateName), |
| 295 autoescape=autoescape) | 292 autoescape=autoescape) |
| 296 template.stream({'extensions': [self]}).dump(manifestPath) | 293 template.stream({'extensions': [self]}).dump(manifestPath) |
| 297 | 294 |
| 298 def writeIEUpdateManifest(self, versions): | 295 def writeIEUpdateManifest(self, versions): |
| 299 """ | 296 """ |
| 300 Writes update.json file for the latest IE build | 297 Writes update.json file for the latest IE build |
| 301 """ | 298 """ |
| 302 if len(versions) == 0: | 299 if len(versions) == 0: |
| 303 return | 300 return |
| 304 | 301 |
| 305 version = versions[0] | 302 version = versions[0] |
| 306 packageName = self.basename + '-' + version + self.config.packageSuffix | 303 packageName = self.basename + '-' + version + self.config.packageSuffix |
| 307 updateURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + packageName + '?update') | 304 updateURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + packageName + '?update') |
| 308 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) | 305 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) |
| 309 manifestPath = os.path.join(baseDir, 'update.json') | 306 manifestPath = os.path.join(baseDir, 'update.json') |
| 310 | 307 |
| 311 from sitescripts.extensions.utils import writeIEUpdateManifest as doWrite | 308 from sitescripts.extensions.utils import writeIEUpdateManifest as doWrite |
| 312 doWrite(manifestPath, [{ | 309 doWrite(manifestPath, [{ |
| 313 'basename': self.basename, | 310 'basename': self.basename, |
| 314 'version': version, | 311 'version': version, |
| 315 'updateURL': updateURL | 312 'updateURL': updateURL, |
| 316 }]) | 313 }]) |
| 317 | 314 |
| 318 for suffix in ['-x86.msi', '-x64.msi', '-gpo-x86.msi', '-gpo-x64.msi']: | 315 for suffix in ['-x86.msi', '-x64.msi', '-gpo-x86.msi', '-gpo-x64.msi']: |
| 319 linkPath = os.path.join(baseDir, '00latest%s' % suffix) | 316 linkPath = os.path.join(baseDir, '00latest%s' % suffix) |
| 320 outputPath = os.path.join(baseDir, self.basename + '-' + version + suffix) | 317 outputPath = os.path.join(baseDir, self.basename + '-' + version + suffix) |
| 321 self.symlink_or_copy(outputPath, linkPath) | 318 self.symlink_or_copy(outputPath, linkPath) |
| 322 | 319 |
| 323 def build(self): | 320 def build(self): |
| 324 """ | 321 """ |
| 325 run the build command in the tempdir | 322 run the build command in the tempdir |
| 326 """ | 323 """ |
| 327 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) | 324 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) |
| 328 if not os.path.exists(baseDir): | 325 if not os.path.exists(baseDir): |
| 329 os.makedirs(baseDir) | 326 os.makedirs(baseDir) |
| 330 outputFile = '%s-%s%s' % (self.basename, self.version, self.config.packageSuffix) | 327 outputFile = '%s-%s%s' % (self.basename, self.version, self.config.packageSuffix) |
| 331 self.path = os.path.join(baseDir, outputFile) | 328 self.path = os.path.join(baseDir, outputFile) |
| 332 self.updateURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + outputFile + '?update') | 329 self.updateURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + outputFile + '?update') |
| 333 | 330 |
| 334 if self.config.type == 'android': | 331 if self.config.type == 'android': |
| 335 apkFile = open(self.path, 'wb') | 332 apkFile = open(self.path, 'wb') |
| 336 | 333 |
| 337 try: | 334 try: |
| 338 try: | 335 try: |
| 339 port = get_config().get('extensions', 'androidBuildPort') | 336 port = get_config().get('extensions', 'androidBuildPort') |
| 340 except ConfigParser.NoOptionError: | 337 except ConfigParser.NoOptionError: |
| 341 port = '22' | 338 port = '22' |
| 342 command = ['ssh', '-p', port, get_config().get('extensions', 'androidBuildHost')] | 339 command = ['ssh', '-p', port, get_config().get('extensions', 'androidBuildHost')] |
| 343 command.extend(map(pipes.quote, [ | 340 command.extend(map(pipes.quote, [ |
| 344 '/home/android/bin/makedebugbuild.py', '--revision', | 341 '/home/android/bin/makedebugbuild.py', '--revision', |
| 345 self.buildNum, '--version', self.version, '--stdout' | 342 self.buildNum, '--version', self.version, '--stdout', |
| 346 ])) | 343 ])) |
| 347 subprocess.check_call(command, stdout=apkFile, close_fds=True) | 344 subprocess.check_call(command, stdout=apkFile, close_fds=True) |
| 348 except: | 345 except: |
| 349 # clear broken output if any | 346 # clear broken output if any |
| 350 if os.path.exists(self.path): | 347 if os.path.exists(self.path): |
| 351 os.remove(self.path) | 348 os.remove(self.path) |
| 352 raise | 349 raise |
| 353 else: | 350 else: |
| 354 env = os.environ | 351 env = os.environ |
| 355 spiderMonkeyBinary = self.config.spiderMonkeyBinary | 352 spiderMonkeyBinary = self.config.spiderMonkeyBinary |
| 356 if spiderMonkeyBinary: | 353 if spiderMonkeyBinary: |
| 357 env = dict(env, SPIDERMONKEY_BINARY=spiderMonkeyBinary) | 354 env = dict(env, SPIDERMONKEY_BINARY=spiderMonkeyBinary) |
| 358 | 355 |
| 359 command = [os.path.join(self.tempdir, 'build.py')] | 356 command = [os.path.join(self.tempdir, 'build.py')] |
| 360 if self.config.type in {'safari', 'edge'}: | 357 command.extend(['build', '-t', self.config.type, '-b', |
|
tlucas
2018/04/13 13:06:04
The branch 'edge' is built from still uses the old
Sebastian Noack
2018/04/14 02:47:30
I would rather see a dependency update landing in
tlucas
2018/04/14 08:55:46
Yeah, i agree.
@Ollie - what do you think about th
Sebastian Noack
2018/04/14 09:40:54
Alternatively, we could add a hack to build.py:
Oleksandr
2018/04/16 04:37:07
I would rather merge `master` into `edge` first. T
Sebastian Noack
2018/04/16 05:45:50
Wouldn't this rather be a reason to go with my sug
Sebastian Noack
2018/04/16 05:47:19
Sorry, I meant "edge" (not "next").
tlucas
2018/04/16 10:06:07
All of the above does IMHO encourage a workaround
tlucas
2018/04/16 10:25:14
https://codereview.adblockplus.org/29753557/
https
tlucas
2018/04/16 14:50:09
Done.
| |
| 361 command.extend(['-t', self.config.type, 'build']) | 358 self.buildNum]) |
| 362 else: | |
| 363 command.extend(['build', '-t', self.config.type]) | |
| 364 command.extend(['-b', self.buildNum]) | |
| 365 | 359 |
| 366 if self.config.type not in {'gecko', 'edge'}: | 360 if self.config.type not in {'gecko', 'edge'}: |
| 367 command.extend(['-k', self.config.keyFile]) | 361 command.extend(['-k', self.config.keyFile]) |
| 368 command.append(self.path) | 362 command.append(self.path) |
| 369 subprocess.check_call(command, env=env) | 363 subprocess.check_call(command, env=env) |
| 370 | 364 |
| 371 if not os.path.exists(self.path): | 365 if not os.path.exists(self.path): |
| 372 raise Exception("Build failed, output file hasn't been created") | 366 raise Exception("Build failed, output file hasn't been created") |
| 373 | 367 |
| 374 if self.config.type not in self.downloadable_repos: | 368 if self.config.type not in self.downloadable_repos: |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 412 packageFile = self.basename + '-' + version + self.config.packageSuffix | 406 packageFile = self.basename + '-' + version + self.config.packageSuffix |
| 413 changelogFile = self.basename + '-' + version + '.changelog.xhtml' | 407 changelogFile = self.basename + '-' + version + '.changelog.xhtml' |
| 414 if not os.path.exists(os.path.join(baseDir, packageFile)): | 408 if not os.path.exists(os.path.join(baseDir, packageFile)): |
| 415 # Oops | 409 # Oops |
| 416 continue | 410 continue |
| 417 | 411 |
| 418 link = { | 412 link = { |
| 419 'version': version, | 413 'version': version, |
| 420 'download': packageFile, | 414 'download': packageFile, |
| 421 'mtime': os.path.getmtime(os.path.join(baseDir, packageFile)), | 415 'mtime': os.path.getmtime(os.path.join(baseDir, packageFile)), |
| 422 'size': os.path.getsize(os.path.join(baseDir, packageFile)) | 416 'size': os.path.getsize(os.path.join(baseDir, packageFile)), |
| 423 } | 417 } |
| 424 if os.path.exists(os.path.join(baseDir, changelogFile)): | 418 if os.path.exists(os.path.join(baseDir, changelogFile)): |
| 425 link['changelog'] = changelogFile | 419 link['changelog'] = changelogFile |
| 426 links.append(link) | 420 links.append(link) |
| 427 template = get_template(get_config().get('extensions', 'nightlyIndexPage')) | 421 template = get_template(get_config().get('extensions', 'nightlyIndexPage')) |
| 428 template.stream({'config': self.config, 'links': links}).dump(outputPath) | 422 template.stream({'config': self.config, 'links': links}).dump(outputPath) |
| 429 | 423 |
| 430 def read_downloads_lockfile(self): | 424 def read_downloads_lockfile(self): |
| 431 path = get_config().get('extensions', 'downloadLockFile') | 425 path = get_config().get('extensions', 'downloadLockFile') |
| 432 try: | 426 try: |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 457 try: | 451 try: |
| 458 for i, entry in enumerate(current[platform]): | 452 for i, entry in enumerate(current[platform]): |
| 459 if entry[filter_key] == filter_value: | 453 if entry[filter_key] == filter_value: |
| 460 del current[platform][i] | 454 del current[platform][i] |
| 461 if len(current[platform]) == 0: | 455 if len(current[platform]) == 0: |
| 462 del current[platform] | 456 del current[platform] |
| 463 except KeyError: | 457 except KeyError: |
| 464 pass | 458 pass |
| 465 self.write_downloads_lockfile(current) | 459 self.write_downloads_lockfile(current) |
| 466 | 460 |
| 467 def generate_jwt_request(self, issuer, secret, url, method, data=None, | 461 def azure_jwt_signature_fnc(self): |
| 468 add_headers=[]): | 462 return ( |
| 469 header = { | 463 'RS256', |
| 470 'alg': 'HS256', # HMAC-SHA256 | 464 lambda s, m: PKCS1_v1_5.new(s).sign(Crypto.Hash.SHA256.new(m)), |
| 471 'typ': 'JWT', | 465 ) |
| 466 | |
| 467 def mozilla_jwt_signature_fnc(self): | |
| 468 return ( | |
| 469 'HS256', | |
| 470 lambda s, m: hmac.new(s, msg=m, digestmod=hashlib.sha256).digest(), | |
| 471 ) | |
| 472 | |
| 473 def sign_jwt(self, issuer, secret, url, signature_fnc, jwt_headers={}): | |
| 474 alg, fnc = signature_fnc() | |
| 475 | |
| 476 header = {'typ': 'JWT'} | |
| 477 header.update(jwt_headers) | |
| 478 header.update({'alg': alg}) | |
| 479 | |
| 480 issued = int(time.time()) | |
| 481 expires = issued + 60 | |
| 482 | |
| 483 payload = { | |
| 484 'aud': url, | |
| 485 'iss': issuer, | |
| 486 'sub': issuer, | |
| 487 'jti': str(uuid.uuid4()), | |
| 488 'iat': issued, | |
| 489 'nbf': issued, | |
| 490 'exp': expires, | |
| 472 } | 491 } |
| 473 | 492 |
| 474 issued = int(time.time()) | 493 segments = [base64.urlsafe_b64encode(json.dumps(header)), |
| 475 payload = { | 494 base64.urlsafe_b64encode(json.dumps(payload))] |
| 476 'iss': issuer, | 495 |
| 477 'jti': random.random(), | 496 signature = fnc(secret, b'.'.join(segments)) |
| 478 'iat': issued, | 497 segments.append(base64.urlsafe_b64encode(signature)) |
| 479 'exp': issued + 60, | 498 return b'.'.join(segments) |
| 480 } | 499 |
| 481 | 500 def generate_mozilla_jwt_request(self, issuer, secret, url, method, |
| 482 hmac_data = '{}.{}'.format( | 501 data=None, add_headers=[]): |
| 483 base64.b64encode(json.dumps(header)), | 502 signed = self.sign_jwt(issuer, secret, url, |
| 484 base64.b64encode(json.dumps(payload)) | 503 self.mozilla_jwt_signature_fnc) |
| 485 ) | |
| 486 | |
| 487 signature = hmac.new(secret, msg=hmac_data, | |
| 488 digestmod=hashlib.sha256).digest() | |
| 489 token = '{}.{}'.format(hmac_data, base64.b64encode(signature)) | |
| 490 | 504 |
| 491 request = urllib2.Request(url, data) | 505 request = urllib2.Request(url, data) |
| 492 request.add_header('Authorization', 'JWT ' + token) | 506 request.add_header('Authorization', 'JWT ' + signed) |
| 493 for header in add_headers: | 507 for header in add_headers: |
| 494 request.add_header(*header) | 508 request.add_header(*header) |
| 495 request.get_method = lambda: method | 509 request.get_method = lambda: method |
| 496 | 510 |
| 497 return request | 511 return request |
| 498 | 512 |
| 499 def uploadToMozillaAddons(self): | 513 def uploadToMozillaAddons(self): |
| 500 import urllib3 | 514 import urllib3 |
| 501 | 515 |
| 502 config = get_config() | 516 config = get_config() |
| 503 | 517 |
| 504 upload_url = ('https://addons.mozilla.org/api/v3/addons/{}/' | 518 upload_url = ('https://addons.mozilla.org/api/v3/addons/{}/' |
| 505 'versions/{}/').format(self.extensionID, self.version) | 519 'versions/{}/').format(self.extensionID, self.version) |
| 506 | 520 |
| 507 with open(self.path, 'rb') as file: | 521 with open(self.path, 'rb') as file: |
| 508 data, content_type = urllib3.filepost.encode_multipart_formdata({ | 522 data, content_type = urllib3.filepost.encode_multipart_formdata({ |
| 509 'upload': ( | 523 'upload': ( |
| 510 os.path.basename(self.path), | 524 os.path.basename(self.path), |
| 511 file.read(), | 525 file.read(), |
| 512 'application/x-xpinstall' | 526 'application/x-xpinstall', |
| 513 ) | 527 ), |
| 514 }) | 528 }) |
| 515 | 529 |
| 516 request = self.generate_jwt_request( | 530 request = self.generate_mozilla_jwt_request( |
| 517 config.get('extensions', 'amo_key'), | 531 config.get('extensions', 'amo_key'), |
| 518 config.get('extensions', 'amo_secret'), | 532 config.get('extensions', 'amo_secret'), |
| 519 upload_url, | 533 upload_url, |
| 520 'PUT', | 534 'PUT', |
| 521 data, | 535 data, |
| 522 [('Content-Type', content_type)] | 536 [('Content-Type', content_type)], |
| 523 ) | 537 ) |
| 524 | 538 |
| 525 try: | 539 try: |
| 526 urllib2.urlopen(request).close() | 540 urllib2.urlopen(request).close() |
| 527 except urllib2.HTTPError as e: | 541 except urllib2.HTTPError as e: |
| 528 try: | 542 try: |
| 529 logging.error(e.read()) | 543 logging.error(e.read()) |
| 530 finally: | 544 finally: |
| 531 e.close() | 545 e.close() |
| 532 raise | 546 raise |
| 533 | 547 |
| 534 self.add_to_downloads_lockfile( | 548 self.add_to_downloads_lockfile( |
| 535 self.config.type, | 549 self.config.type, |
| 536 { | 550 { |
| 537 'buildtype': 'devbuild', | 551 'buildtype': 'devbuild', |
| 538 'app_id': self.extensionID, | 552 'app_id': self.extensionID, |
| 539 'version': self.version, | 553 'version': self.version, |
| 540 } | 554 }, |
| 541 ) | 555 ) |
| 542 os.remove(self.path) | 556 os.remove(self.path) |
| 543 | 557 |
| 544 def download_from_mozilla_addons(self, buildtype, version, app_id): | 558 def download_from_mozilla_addons(self, buildtype, version, app_id): |
| 545 config = get_config() | 559 config = get_config() |
| 546 iss = config.get('extensions', 'amo_key') | 560 iss = config.get('extensions', 'amo_key') |
| 547 secret = config.get('extensions', 'amo_secret') | 561 secret = config.get('extensions', 'amo_secret') |
| 548 | 562 |
| 549 url = ('https://addons.mozilla.org/api/v3/addons/{}/' | 563 url = ('https://addons.mozilla.org/api/v3/addons/{}/' |
| 550 'versions/{}/').format(app_id, version) | 564 'versions/{}/').format(app_id, version) |
| 551 | 565 |
| 552 request = self.generate_jwt_request(iss, secret, url, 'GET') | 566 request = self.generate_mozilla_jwt_request( |
| 567 iss, secret, url, 'GET', | |
| 568 ) | |
| 553 response = json.load(urllib2.urlopen(request)) | 569 response = json.load(urllib2.urlopen(request)) |
| 554 | 570 |
| 555 filename = '{}-{}.xpi'.format(self.basename, version) | 571 filename = '{}-{}.xpi'.format(self.basename, version) |
| 556 self.path = os.path.join( | 572 self.path = os.path.join( |
| 557 config.get('extensions', 'nightliesDirectory'), | 573 config.get('extensions', 'nightliesDirectory'), |
| 558 self.basename, | 574 self.basename, |
| 559 filename | 575 filename, |
| 560 ) | 576 ) |
| 561 | 577 |
| 562 necessary = ['passed_review', 'reviewed', 'processed', 'valid'] | 578 necessary = ['passed_review', 'reviewed', 'processed', 'valid'] |
| 563 if all(response[x] for x in necessary): | 579 if all(response[x] for x in necessary): |
| 564 download_url = response['files'][0]['download_url'] | 580 download_url = response['files'][0]['download_url'] |
| 565 checksum = response['files'][0]['hash'] | 581 checksum = response['files'][0]['hash'] |
| 566 | 582 |
| 567 request = self.generate_jwt_request(iss, secret, download_url, | 583 request = self.generate_mozilla_jwt_request( |
| 568 'GET') | 584 iss, secret, download_url, 'GET', |
| 585 ) | |
| 569 try: | 586 try: |
| 570 response = urllib2.urlopen(request) | 587 response = urllib2.urlopen(request) |
| 571 except urllib2.HTTPError as e: | 588 except urllib2.HTTPError as e: |
| 572 logging.error(e.read()) | 589 logging.error(e.read()) |
| 573 | 590 |
| 574 # Verify the extension's integrity | 591 # Verify the extension's integrity |
| 575 file_content = response.read() | 592 file_content = response.read() |
| 576 sha256 = hashlib.sha256(file_content) | 593 sha256 = hashlib.sha256(file_content) |
| 577 returned_checksum = '{}:{}'.format(sha256.name, sha256.hexdigest()) | 594 returned_checksum = '{}:{}'.format(sha256.name, sha256.hexdigest()) |
| 578 | 595 |
| 579 if returned_checksum != checksum: | 596 if returned_checksum != checksum: |
| 580 logging.error('Checksum could not be verified: {} vs {}' | 597 logging.error('Checksum could not be verified: {} vs {}' |
| 581 ''.format(checksum, returned_checksum)) | 598 ''.format(checksum, returned_checksum)) |
| 582 | 599 |
| 583 with open(self.path, 'w') as fp: | 600 with open(self.path, 'w') as fp: |
| 584 fp.write(file_content) | 601 fp.write(file_content) |
| 585 | 602 |
| 586 self.update_link = os.path.join( | 603 self.update_link = os.path.join( |
| 587 config.get('extensions', 'nightliesURL'), | 604 config.get('extensions', 'nightliesURL'), |
| 588 self.basename, | 605 self.basename, |
| 589 filename | 606 filename, |
| 590 ) | 607 ) |
| 591 | 608 |
| 592 self.remove_from_downloads_lockfile(self.config.type, | 609 self.remove_from_downloads_lockfile(self.config.type, |
| 593 'version', | 610 'version', |
| 594 version) | 611 version) |
| 595 elif not response['passed_review'] or not response['valid']: | 612 elif not response['passed_review'] or not response['valid']: |
| 596 # When the review failed for any reason, we want to know about it | 613 # When the review failed for any reason, we want to know about it |
| 597 logging.error(json.dumps(response, indent=4)) | 614 logging.error(json.dumps(response, indent=4)) |
| 598 self.remove_from_downloads_lockfile(self.config.type, | 615 self.remove_from_downloads_lockfile(self.config.type, |
| 599 'version', | 616 'version', |
| 600 version) | 617 version) |
| 601 | 618 |
| 602 def uploadToChromeWebStore(self): | 619 def uploadToChromeWebStore(self): |
| 603 | 620 |
| 604 opener = urllib2.build_opener(HTTPErrorBodyHandler) | 621 opener = urllib2.build_opener(HTTPErrorBodyHandler) |
| 605 | 622 |
| 606 # use refresh token to obtain a valid access token | 623 # use refresh token to obtain a valid access token |
| 607 # https://developers.google.com/accounts/docs/OAuth2WebServer#refresh | 624 # https://developers.google.com/accounts/docs/OAuth2WebServer#refresh |
| 608 | 625 |
| 609 response = json.load(opener.open( | 626 response = json.load(opener.open( |
| 610 'https://accounts.google.com/o/oauth2/token', | 627 'https://accounts.google.com/o/oauth2/token', |
| 611 | 628 |
| 612 urlencode([ | 629 urlencode([ |
| 613 ('refresh_token', self.config.refreshToken), | 630 ('refresh_token', self.config.refreshToken), |
| 614 ('client_id', self.config.clientID), | 631 ('client_id', self.config.clientID), |
| 615 ('client_secret', self.config.clientSecret), | 632 ('client_secret', self.config.clientSecret), |
| 616 ('grant_type', 'refresh_token'), | 633 ('grant_type', 'refresh_token'), |
| 617 ]) | 634 ]), |
| 618 )) | 635 )) |
| 619 | 636 |
| 620 auth_token = '%s %s' % (response['token_type'], response['access_token']) | 637 auth_token = '%s %s' % (response['token_type'], response['access_token']) |
| 621 | 638 |
| 622 # upload a new version with the Chrome Web Store API | 639 # upload a new version with the Chrome Web Store API |
| 623 # https://developer.chrome.com/webstore/using_webstore_api#uploadexisitn g | 640 # https://developer.chrome.com/webstore/using_webstore_api#uploadexisitn g |
| 624 | 641 |
| 625 request = urllib2.Request('https://www.googleapis.com/upload/chromewebstore/v1.1/items/' + self.config.devbuildGalleryID) | 642 request = urllib2.Request('https://www.googleapis.com/upload/chromewebstore/v1.1/items/' + self.config.devbuildGalleryID) |
| 626 request.get_method = lambda: 'PUT' | 643 request.get_method = lambda: 'PUT' |
| 627 request.add_header('Authorization', auth_token) | 644 request.add_header('Authorization', auth_token) |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 652 request.add_header('Content-Length', '0') | 669 request.add_header('Content-Length', '0') |
| 653 | 670 |
| 654 response = json.load(opener.open(request)) | 671 response = json.load(opener.open(request)) |
| 655 | 672 |
| 656 if any(status not in ('OK', 'ITEM_PENDING_REVIEW') for status in response['status']): | 673 if any(status not in ('OK', 'ITEM_PENDING_REVIEW') for status in response['status']): |
| 657 raise Exception({'status': response['status'], 'statusDetail': response['statusDetail']}) | 674 raise Exception({'status': response['status'], 'statusDetail': response['statusDetail']}) |
| 658 | 675 |
| 659 def generate_certificate_token_request(self, url, private_key): | 676 def generate_certificate_token_request(self, url, private_key): |
| 660 # Construct the token request according to | 677 # Construct the token request according to |
| 661 # https://docs.microsoft.com/en-us/azure/active-directory/develop/active-directory-certificate-credentials | 678 # https://docs.microsoft.com/en-us/azure/active-directory/develop/active-directory-certificate-credentials |
| 662 def base64url_encode(data): | |
| 663 return base64.urlsafe_b64encode(data).replace(b'=', b'') | |
|
Sebastian Noack
2018/04/14 02:47:30
How about .rstrip(b'=')?
tlucas
2018/04/14 08:55:46
see below
tlucas
2018/04/16 14:50:09
FWIW, there's no stripping / replacing done anymor
| |
| 664 | |
| 665 segments = [] | |
| 666 | |
| 667 hex_val = binascii.a2b_hex(self.config.thumbprint) | 679 hex_val = binascii.a2b_hex(self.config.thumbprint) |
| 668 x5t = base64.urlsafe_b64encode(hex_val).decode() | 680 x5t = base64.urlsafe_b64encode(hex_val).decode() |
| 669 | 681 |
| 670 now = datetime.datetime.now() | |
|
Sebastian Noack
2018/04/14 02:47:30
It seems you are relying on the fact that our serv
tlucas
2018/04/14 08:55:46
I'm actually thinking about refactoring parts of t
tlucas
2018/04/16 14:50:09
Done, almost: mktime() expects a time_struct in lo
Sebastian Noack
2018/04/16 16:13:29
You are right. But time.time() gives the same resu
tlucas
2018/04/16 16:50:24
Done.
| |
| 671 minutes = datetime.timedelta(0, 0, 0, 0, 10) | |
| 672 expires = now + minutes | |
| 673 | |
| 674 # generate the full jwt body | |
| 675 jwt_payload = { | |
| 676 'aud': url, | |
| 677 'iss': self.config.clientID, | |
| 678 'sub': self.config.clientID, | |
| 679 'nbf': int(time.mktime(now.timetuple())), | |
| 680 'exp': int(time.mktime(expires.timetuple())), | |
| 681 'jti': str(uuid.uuid4()), | |
| 682 } | |
| 683 | |
| 684 jwt_headers = {'typ': 'JWT', 'alg': 'RS256', 'x5t': x5t} | |
| 685 | |
| 686 # sign the jwt body with the given private key | |
| 687 key = RSA.importKey(private_key) | 682 key = RSA.importKey(private_key) |
| 688 | 683 |
| 689 segments.append(base64url_encode(json.dumps(jwt_headers))) | 684 signed = self.sign_jwt(self.config.clientID, key, url, |
| 690 segments.append(base64url_encode(json.dumps(jwt_payload))) | 685 self.azure_jwt_signature_fnc, |
|
Sebastian Noack
2018/04/14 02:47:30
Since we don't append to segments above, how about
tlucas
2018/04/14 08:55:46
Acknowledged.
tlucas
2018/04/16 14:50:09
Done.
| |
| 691 | 686 jwt_headers={'x5t': x5t}) |
| 692 body = b'.'.join(segments) | |
| 693 signature = PKCS1_v1_5.new(key).sign(Crypto.Hash.SHA256.new(body)) | |
| 694 | |
| 695 segments.append(base64url_encode(signature)) | |
| 696 signed_jwt = b'.'.join(segments) | |
| 697 | 687 |
| 698 # generate oauth parameters for login.microsoft.com | 688 # generate oauth parameters for login.microsoft.com |
| 699 oauth_params = { | 689 oauth_params = { |
| 700 'grant_type': 'client_credentials', | 690 'grant_type': 'client_credentials', |
| 701 'client_id': self.config.clientID, | 691 'client_id': self.config.clientID, |
| 702 'resource': 'https://graph.windows.net', | 692 'resource': 'https://graph.windows.net', |
| 703 'client_assertion_type': 'urn:ietf:params:oauth:client-assertion-' | 693 'client_assertion_type': 'urn:ietf:params:oauth:client-assertion-' |
| 704 'type:jwt-bearer', | 694 'type:jwt-bearer', |
| 705 'client_assertion': signed_jwt, | 695 'client_assertion': signed, |
| 706 } | 696 } |
| 707 | 697 |
| 708 request = urllib2.Request(url, urlencode(oauth_params)) | 698 request = urllib2.Request(url, urlencode(oauth_params)) |
| 709 request.get_method = lambda: 'POST' | 699 request.get_method = lambda: 'POST' |
| 710 | 700 |
| 711 return request | 701 return request |
| 712 | 702 |
| 713 def get_windows_store_access_token(self): | 703 def get_windows_store_access_token(self): |
| 714 # use client certificate to obtain a valid access token | 704 # use client certificate to obtain a valid access token |
| 715 url = 'https://login.microsoftonline.com/{}/oauth2/token'.format( | 705 url_template = 'https://login.microsoftonline.com/{}/oauth2/token' |
| 716 self.config.tenantID | 706 url = url_template.format(self.config.tenantID) |
| 717 ) | |
| 718 | 707 |
| 719 with open(self.config.privateKey, 'r') as fp: | 708 with open(self.config.privateKey, 'r') as fp: |
| 720 private_key = fp.read() | 709 private_key = fp.read() |
| 721 | 710 |
| 722 opener = urllib2.build_opener(HTTPErrorBodyHandler) | 711 opener = urllib2.build_opener(HTTPErrorBodyHandler) |
| 723 request = self.generate_certificate_token_request(url, private_key) | 712 request = self.generate_certificate_token_request(url, private_key) |
| 724 | 713 |
| 725 with contextlib.closing(opener.open(request)) as response: | 714 with contextlib.closing(opener.open(request)) as response: |
| 726 data = json.load(response) | 715 data = json.load(response) |
| 727 auth_token = '{0[token_type]} {0[access_token]}'.format(data) | 716 auth_token = '{0[token_type]} {0[access_token]}'.format(data) |
| (...skipping 25 matching lines...) Expand all Loading... | |
| 753 def upload_to_windows_store(self): | 742 def upload_to_windows_store(self): |
| 754 opener = urllib2.build_opener(HTTPErrorBodyHandler) | 743 opener = urllib2.build_opener(HTTPErrorBodyHandler) |
| 755 | 744 |
| 756 headers = {'Authorization': self.get_windows_store_access_token(), | 745 headers = {'Authorization': self.get_windows_store_access_token(), |
| 757 'Content-type': 'application/json'} | 746 'Content-type': 'application/json'} |
| 758 | 747 |
| 759 # Get application | 748 # Get application |
| 760 # https://docs.microsoft.com/en-us/windows/uwp/monetize/get-an-app | 749 # https://docs.microsoft.com/en-us/windows/uwp/monetize/get-an-app |
| 761 api_path = '{}/v1.0/my/applications/{}'.format( | 750 api_path = '{}/v1.0/my/applications/{}'.format( |
| 762 'https://manage.devcenter.microsoft.com', | 751 'https://manage.devcenter.microsoft.com', |
| 763 self.config.devbuildGalleryID | 752 self.config.devbuildGalleryID, |
| 764 ) | 753 ) |
| 765 | 754 |
| 766 request = urllib2.Request(api_path, None, headers) | 755 request = urllib2.Request(api_path, None, headers) |
| 767 with contextlib.closing(opener.open(request)) as response: | 756 with contextlib.closing(opener.open(request)) as response: |
| 768 app_obj = json.load(response) | 757 app_obj = json.load(response) |
| 769 | 758 |
| 770 # Delete existing in-progress submission | 759 # Delete existing in-progress submission |
| 771 # https://docs.microsoft.com/en-us/windows/uwp/monetize/delete-an-app-su bmission | 760 # https://docs.microsoft.com/en-us/windows/uwp/monetize/delete-an-app-su bmission |
| 772 submissions_path = api_path + '/submissions' | 761 submissions_path = api_path + '/submissions' |
| 773 if 'pendingApplicationSubmission' in app_obj: | 762 if 'pendingApplicationSubmission' in app_obj: |
| (...skipping 126 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 900 # write update manifest | 889 # write update manifest |
| 901 self.writeUpdateManifest() | 890 self.writeUpdateManifest() |
| 902 | 891 |
| 903 # retire old builds | 892 # retire old builds |
| 904 versions = self.retireBuilds() | 893 versions = self.retireBuilds() |
| 905 # update index page | 894 # update index page |
| 906 self.updateIndex(versions) | 895 self.updateIndex(versions) |
| 907 | 896 |
| 908 # Update soft link to latest build | 897 # Update soft link to latest build |
| 909 baseDir = os.path.join( | 898 baseDir = os.path.join( |
| 910 self.config.nightliesDirectory, self.basename | 899 self.config.nightliesDirectory, self.basename, |
| 911 ) | 900 ) |
| 912 linkPath = os.path.join( | 901 linkPath = os.path.join( |
| 913 baseDir, '00latest' + self.config.packageSuffix | 902 baseDir, '00latest' + self.config.packageSuffix, |
| 914 ) | 903 ) |
| 915 | 904 |
| 916 self.symlink_or_copy(self.path, linkPath) | 905 self.symlink_or_copy(self.path, linkPath) |
| 917 finally: | 906 finally: |
| 918 # clean up | 907 # clean up |
| 919 if self.tempdir: | 908 if self.tempdir: |
| 920 shutil.rmtree(self.tempdir, ignore_errors=True) | 909 shutil.rmtree(self.tempdir, ignore_errors=True) |
| 921 | 910 |
| 922 | 911 |
| 923 def main(download=False): | 912 def main(download=False): |
| (...skipping 23 matching lines...) Expand all Loading... | |
| 947 | 936 |
| 948 file = open(nightlyConfigFile, 'wb') | 937 file = open(nightlyConfigFile, 'wb') |
| 949 nightlyConfig.write(file) | 938 nightlyConfig.write(file) |
| 950 | 939 |
| 951 | 940 |
| 952 if __name__ == '__main__': | 941 if __name__ == '__main__': |
| 953 parser = argparse.ArgumentParser() | 942 parser = argparse.ArgumentParser() |
| 954 parser.add_argument('--download', action='store_true', default=False) | 943 parser.add_argument('--download', action='store_true', default=False) |
| 955 args = parser.parse_args() | 944 args = parser.parse_args() |
| 956 main(args.download) | 945 main(args.download) |
| LEFT | RIGHT |