Rietveld Code Review Tool

Delta Between Two Patch Sets: sitescripts/extensions/bin/createNightlies.py

Issue 29751598: Issue 6291 - Use client certificate for Windows Store uploads (Closed)
Base URL: https://hg.adblockplus.org/abpssembly/file/a67d8f0e66b2
Left Patch Set: Created April 16, 2018, 4:49 p.m.
Right Patch Set: NO CHANGE rebase against https://codereview.adblockplus.org/29756646/ Created April 20, 2018, 7:23 a.m.
# This file is part of the Adblock Plus web scripts,
# Copyright (C) 2006-present eyeo GmbH
#
# Adblock Plus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# Adblock Plus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
(...skipping 27 matching lines...)
import subprocess
import sys
import tempfile
import time
import uuid
from urllib import urlencode
import urllib2
import urlparse
import zipfile
import contextlib
from xml.dom.minidom import parse as parseXml

from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
import Crypto.Hash.SHA256

from sitescripts.extensions.utils import (
    compareVersions, Configuration,
    writeAndroidUpdateManifest,
)
from sitescripts.utils import get_config, get_template

MAX_BUILDS = 50


# Google and Microsoft APIs use HTTP error codes with error message in
# body. So we add the response body to the HTTPError to get more
# meaningful error messages.
class HTTPErrorBodyHandler(urllib2.HTTPDefaultErrorHandler):
(...skipping 28 matching lines...)
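The body of HTTPErrorBodyHandler is collapsed in this view. A minimal sketch of such a handler for Python 2's urllib2, assuming it simply re-raises the error with the response body attached (an illustration, not the skipped code):

class HTTPErrorBodyHandlerSketch(urllib2.HTTPDefaultErrorHandler):
    def http_error_default(self, req, fp, code, msg, hdrs):
        # Append the response body to the message so the API's error
        # details survive into the raised exception.
        raise urllib2.HTTPError(req.get_full_url(), code,
                                '{}\n{}'.format(msg, fp.read()), hdrs, fp)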

    def hasChanges(self):
        return self.revision != self.previousRevision

    def getCurrentRevision(self):
        """
        retrieves the current revision ID from the repository
        """
        command = [
            'hg', 'id', '-i', '-r', self.config.revision, '--config',
            'defaults.id=', self.config.repository,
        ]
        return subprocess.check_output(command).strip()

    def getCurrentBuild(self):
        """
        calculates the (typically numerical) build ID for the current build
        """
        command = ['hg', 'id', '-n', '--config', 'defaults.id=', self.tempdir]
        build = subprocess.check_output(command).strip()
        return build

    def getChanges(self):
        """
        retrieve changes between the current and previous ("first") revision
        """
        command = [
            'hg', 'log', '-R', self.tempdir, '-r',
            'reverse(ancestors({}))'.format(self.config.revision), '-l', '50',
            '--encoding', 'utf-8', '--template',
            '{date|isodate}\\0{author|person}\\0{rev}\\0{desc}\\0\\0',
            '--config', 'defaults.log=',
        ]
        result = subprocess.check_output(command).decode('utf-8')

        for change in result.split('\x00\x00'):
            if change:
                date, author, revision, description = change.split('\x00')
                yield {'date': date, 'author': author, 'revision': revision, 'description': description}

    def copyRepository(self):
        """
(...skipping 141 matching lines...)
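For reference, the --template passed to hg log in getChanges() above emits one NUL-delimited record per changeset, terminated by a double NUL; an illustrative (made-up) record would be

    '2018-04-20 07:23 +0000\x00Jane Doe\x001234\x00Issue 6291 - ...\x00\x00'

which split('\x00') unpacks into the date, author, revision and description fields of the yielded dict.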
            os.makedirs(baseDir)

        # ABP for Android used to have its own update manifest format. We need to
        # generate both that and the new one in the libadblockplus format as long
        # as a significant amount of users is on an old version.
        if self.config.type == 'android':
            newManifestPath = os.path.join(baseDir, 'update.json')
            writeAndroidUpdateManifest(newManifestPath, [{
                'basename': self.basename,
                'version': self.version,
                'updateURL': self.updateURL,
            }])

        template = get_template(get_config().get('extensions', templateName),
                                autoescape=autoescape)
        template.stream({'extensions': [self]}).dump(manifestPath)

    def writeIEUpdateManifest(self, versions):
        """
        Writes update.json file for the latest IE build
        """
        if len(versions) == 0:
            return

        version = versions[0]
        packageName = self.basename + '-' + version + self.config.packageSuffix
        updateURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + packageName + '?update')
        baseDir = os.path.join(self.config.nightliesDirectory, self.basename)
        manifestPath = os.path.join(baseDir, 'update.json')

        from sitescripts.extensions.utils import writeIEUpdateManifest as doWrite
        doWrite(manifestPath, [{
            'basename': self.basename,
            'version': version,
            'updateURL': updateURL,
        }])

        for suffix in ['-x86.msi', '-x64.msi', '-gpo-x86.msi', '-gpo-x64.msi']:
            linkPath = os.path.join(baseDir, '00latest%s' % suffix)
            outputPath = os.path.join(baseDir, self.basename + '-' + version + suffix)
            self.symlink_or_copy(outputPath, linkPath)

    def build(self):
        """
        run the build command in the tempdir
        """
        baseDir = os.path.join(self.config.nightliesDirectory, self.basename)
        if not os.path.exists(baseDir):
            os.makedirs(baseDir)
        outputFile = '%s-%s%s' % (self.basename, self.version, self.config.packageSuffix)
        self.path = os.path.join(baseDir, outputFile)
        self.updateURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + outputFile + '?update')

        if self.config.type == 'android':
            apkFile = open(self.path, 'wb')

            try:
                try:
                    port = get_config().get('extensions', 'androidBuildPort')
                except ConfigParser.NoOptionError:
                    port = '22'
                command = ['ssh', '-p', port, get_config().get('extensions', 'androidBuildHost')]
                command.extend(map(pipes.quote, [
                    '/home/android/bin/makedebugbuild.py', '--revision',
                    self.buildNum, '--version', self.version, '--stdout',
                ]))
                subprocess.check_call(command, stdout=apkFile, close_fds=True)
            except:
                # clear broken output if any
                if os.path.exists(self.path):
                    os.remove(self.path)
                raise
        else:
            env = os.environ
            spiderMonkeyBinary = self.config.spiderMonkeyBinary
(...skipping 53 matching lines...)
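With hypothetical configuration values (androidBuildHost = 'builder@android-build.example.com', no androidBuildPort set), the command assembled above comes out roughly as

    ['ssh', '-p', '22', 'builder@android-build.example.com',
     '/home/android/bin/makedebugbuild.py', '--revision', '1234',
     '--version', '1.0.4.1234', '--stdout']

pipes.quote() protects the remote arguments from the login shell, and the resulting APK is streamed into apkFile through the subprocess's stdout.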
            packageFile = self.basename + '-' + version + self.config.packageSuffix
            changelogFile = self.basename + '-' + version + '.changelog.xhtml'
            if not os.path.exists(os.path.join(baseDir, packageFile)):
                # Oops
                continue

            link = {
                'version': version,
                'download': packageFile,
                'mtime': os.path.getmtime(os.path.join(baseDir, packageFile)),
                'size': os.path.getsize(os.path.join(baseDir, packageFile)),
            }
            if os.path.exists(os.path.join(baseDir, changelogFile)):
                link['changelog'] = changelogFile
            links.append(link)
        template = get_template(get_config().get('extensions', 'nightlyIndexPage'))
        template.stream({'config': self.config, 'links': links}).dump(outputPath)

    def read_downloads_lockfile(self):
        path = get_config().get('extensions', 'downloadLockFile')
        try:
(...skipping 27 matching lines...)
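The rest of read_downloads_lockfile() is collapsed here. Judging from how it is used below, it presumably parses the lock file as JSON and falls back to an empty mapping when the file does not exist yet; a sketch under that assumption:

def read_downloads_lockfile_sketch(path):
    # platform -> list of pending download entries
    try:
        with open(path, 'r') as fp:
            return json.load(fp)
    except IOError:
        return {}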
                    del current[platform][i]
                    if len(current[platform]) == 0:
                        del current[platform]
        except KeyError:
            pass
        self.write_downloads_lockfile(current)

    def azure_jwt_signature_fnc(self):
        return (
            'RS256',
            lambda s, m: PKCS1_v1_5.new(s).sign(Crypto.Hash.SHA256.new(m)),
        )

    def mozilla_jwt_signature_fnc(self):
        return (
            'HS256',
            lambda s, m: hmac.new(s, msg=m, digestmod=hashlib.sha256).digest(),
        )

    def sign_jwt(self, issuer, secret, url, signature_fnc, jwt_headers={}):
        alg, fnc = signature_fnc()

        header = {'typ': 'JWT'}
        header.update(jwt_headers)
        header.update({'alg': alg})

        issued = int(time.time())
(...skipping 35 matching lines...)
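The remainder of sign_jwt() is collapsed. A JWT is conventionally assembled as base64url(header).base64url(payload).base64url(signature), so the continuation presumably resembles the sketch below; the payload claims and the 60-second lifetime are assumptions, while header, issuer, issued, fnc and secret come from the code above.

import base64

def b64url(data):
    return base64.urlsafe_b64encode(data).rstrip('=')

payload = {'iss': issuer, 'iat': issued, 'exp': issued + 60, 'aud': url}
segments = [b64url(json.dumps(header)), b64url(json.dumps(payload))]
# For RS256 the secret must already be an RSA key object (RSA.importKey);
# for HS256 it is the raw shared secret string.
signature = fnc(secret, '.'.join(segments))
token = '.'.join(segments + [b64url(signature)])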
        config = get_config()

        upload_url = ('https://addons.mozilla.org/api/v3/addons/{}/'
                      'versions/{}/').format(self.extensionID, self.version)

        with open(self.path, 'rb') as file:
            data, content_type = urllib3.filepost.encode_multipart_formdata({
                'upload': (
                    os.path.basename(self.path),
                    file.read(),
                    'application/x-xpinstall',
                ),
            })

        request = self.generate_mozilla_jwt_request(
            config.get('extensions', 'amo_key'),
            config.get('extensions', 'amo_secret'),
            upload_url,
            'PUT',
            data,
            [('Content-Type', content_type)],
        )

        try:
            urllib2.urlopen(request).close()
        except urllib2.HTTPError as e:
            try:
                logging.error(e.read())
            finally:
                e.close()
            raise

        self.add_to_downloads_lockfile(
            self.config.type,
            {
                'buildtype': 'devbuild',
                'app_id': self.extensionID,
                'version': self.version,
            },
        )
        os.remove(self.path)

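add_to_downloads_lockfile() above queues the uploaded build so a later --download run can pick it up once AMO has reviewed it; the lock file then holds entries of roughly this shape (platform key and values are illustrative):

{
    "gecko": [
        {"buildtype": "devbuild",
         "app_id": "devbuild@example.com",
         "version": "3.0.2.1234"}
    ]
}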
    def download_from_mozilla_addons(self, buildtype, version, app_id):
        config = get_config()
        iss = config.get('extensions', 'amo_key')
        secret = config.get('extensions', 'amo_secret')

        url = ('https://addons.mozilla.org/api/v3/addons/{}/'
               'versions/{}/').format(app_id, version)

        request = self.generate_mozilla_jwt_request(
            iss, secret, url, 'GET',
        )
        response = json.load(urllib2.urlopen(request))

        filename = '{}-{}.xpi'.format(self.basename, version)
        self.path = os.path.join(
            config.get('extensions', 'nightliesDirectory'),
            self.basename,
            filename,
        )

        necessary = ['passed_review', 'reviewed', 'processed', 'valid']
        if all(response[x] for x in necessary):
            download_url = response['files'][0]['download_url']
            checksum = response['files'][0]['hash']

            request = self.generate_mozilla_jwt_request(
                iss, secret, download_url, 'GET',
            )
(...skipping 10 matching lines...)
            if returned_checksum != checksum:
                logging.error('Checksum could not be verified: {} vs {}'
                              ''.format(checksum, returned_checksum))

            with open(self.path, 'w') as fp:
                fp.write(file_content)

            self.update_link = os.path.join(
                config.get('extensions', 'nightliesURL'),
                self.basename,
                filename,
            )

            self.remove_from_downloads_lockfile(self.config.type,
                                                'version',
                                                version)
        elif not response['passed_review'] or not response['valid']:
            # When the review failed for any reason, we want to know about it
            logging.error(json.dumps(response, indent=4))
            self.remove_from_downloads_lockfile(self.config.type,
                                                'version',
                                                version)

    def uploadToChromeWebStore(self):

        opener = urllib2.build_opener(HTTPErrorBodyHandler)

        # use refresh token to obtain a valid access token
        # https://developers.google.com/accounts/docs/OAuth2WebServer#refresh

        response = json.load(opener.open(
            'https://accounts.google.com/o/oauth2/token',

            urlencode([
                ('refresh_token', self.config.refreshToken),
                ('client_id', self.config.clientID),
                ('client_secret', self.config.clientSecret),
                ('grant_type', 'refresh_token'),
            ]),
        ))

        auth_token = '%s %s' % (response['token_type'], response['access_token'])

        # upload a new version with the Chrome Web Store API
        # https://developer.chrome.com/webstore/using_webstore_api#uploadexisitng

        request = urllib2.Request('https://www.googleapis.com/upload/chromewebstore/v1.1/items/' + self.config.devbuildGalleryID)
        request.get_method = lambda: 'PUT'
        request.add_header('Authorization', auth_token)
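The refresh-token exchange above returns a standard OAuth 2.0 token response; an illustrative reply (placeholder values) is

    {"access_token": "ya29.a0...", "token_type": "Bearer", "expires_in": 3600}

so auth_token becomes 'Bearer ya29.a0...' and is sent as the Authorization header of the upload request.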
(...skipping 50 matching lines...)
            'client_assertion': signed,
        }

        request = urllib2.Request(url, urlencode(oauth_params))
        request.get_method = lambda: 'POST'

        return request

    def get_windows_store_access_token(self):
        # use client certificate to obtain a valid access token
        url_template = 'https://login.microsoftonline.com/{}/oauth2/token'
        url = url_template.format(self.config.tenantID)

        with open(self.config.privateKey, 'r') as fp:
            private_key = fp.read()

        opener = urllib2.build_opener(HTTPErrorBodyHandler)
        request = self.generate_certificate_token_request(url, private_key)

        with contextlib.closing(opener.open(request)) as response:
            data = json.load(response)
            auth_token = '{0[token_type]} {0[access_token]}'.format(data)
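Only the 'client_assertion' parameter of generate_certificate_token_request() is visible above. In the Azure AD client-credentials flow with a certificate, the POSTed form would typically carry the parameters sketched below; apart from client_assertion, every field here is reconstructed from the documented flow, not taken from the skipped code.

oauth_params = {
    'grant_type': 'client_credentials',
    'client_id': self.config.clientID,
    'client_assertion_type':
        'urn:ietf:params:oauth:client-assertion-type:jwt-bearer',
    'client_assertion': signed,
    'resource': 'https://manage.devcenter.microsoft.com',
}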
(...skipping 25 matching lines...)
    def upload_to_windows_store(self):
        opener = urllib2.build_opener(HTTPErrorBodyHandler)

        headers = {'Authorization': self.get_windows_store_access_token(),
                   'Content-type': 'application/json'}

        # Get application
        # https://docs.microsoft.com/en-us/windows/uwp/monetize/get-an-app
        api_path = '{}/v1.0/my/applications/{}'.format(
            'https://manage.devcenter.microsoft.com',
            self.config.devbuildGalleryID,
        )

        request = urllib2.Request(api_path, None, headers)
        with contextlib.closing(opener.open(request)) as response:
            app_obj = json.load(response)

        # Delete existing in-progress submission
        # https://docs.microsoft.com/en-us/windows/uwp/monetize/delete-an-app-submission
        submissions_path = api_path + '/submissions'
        if 'pendingApplicationSubmission' in app_obj:
(...skipping 126 matching lines...)
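The rest of upload_to_windows_store() is collapsed. Following the documented Dev Center submission API, the remaining steps are presumably along these lines (an outline, not the skipped code):

# 1. DELETE the submission referenced by app_obj['pendingApplicationSubmission']
# 2. POST to submissions_path to create a new submission and obtain its
#    fileUploadUrl
# 3. PUT the package zip to that fileUploadUrl (an Azure blob URL,
#    x-ms-blob-type: BlockBlob)
# 4. POST .../submissions/{id}/commit and poll the submission status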
            # write update manifest
            self.writeUpdateManifest()

            # retire old builds
            versions = self.retireBuilds()
            # update index page
            self.updateIndex(versions)

            # Update soft link to latest build
            baseDir = os.path.join(
                self.config.nightliesDirectory, self.basename,
            )
            linkPath = os.path.join(
                baseDir, '00latest' + self.config.packageSuffix,
            )

            self.symlink_or_copy(self.path, linkPath)
        finally:
            # clean up
            if self.tempdir:
                shutil.rmtree(self.tempdir, ignore_errors=True)


def main(download=False):
(...skipping 23 matching lines...)

    file = open(nightlyConfigFile, 'wb')
    nightlyConfig.write(file)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--download', action='store_true', default=False)
    args = parser.parse_args()
    main(args.download)
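Given the argparse setup above, the script is run without arguments for the nightly build-and-upload pass and with --download to fetch builds that have passed review, e.g. (the invocation path is an assumption):

python -m sitescripts.extensions.bin.createNightlies
python -m sitescripts.extensions.bin.createNightlies --download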