tlucas 2017/11/02 10:03:29
This code was completely copied from buildtools @

Vasily Kuznetsov 2017/11/30 17:34:00
Acknowledged.

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import re
import struct
import time
import zlib

from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5

from buildtools.packager import getTemplate

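# The XAR header packs, in big-endian order: magic ('xar!'), header size,
# format version, compressed TOC size, uncompressed TOC size and the
# checksum algorithm identifier (1 = SHA-1).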
XAR_HEADER = struct.Struct('>IHHQQI')
XAR_HEADER_MAGIC = 0x78617221
XAR_VERSION = 1
XAR_CKSUM_SHA1 = 1


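# Reads a PEM key file, returning the base64 bodies of all CERTIFICATE
# sections (whitespace stripped) and the imported RSA private key.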
def read_certificates_and_key(keyfile):
    with open(keyfile, 'r') as file:
        data = file.read()

    certificates = []
    key = None
    for match in re.finditer(r'-+BEGIN (.*?)-+(.*?)-+END \1-+', data, re.S):
        section = match.group(1)
        if section == 'CERTIFICATE':
            certificates.append(re.sub(r'\s+', '', match.group(2)))
        elif section == 'PRIVATE KEY':
            key = RSA.importKey(match.group(0))
    if not key:
        raise Exception('Could not find private key in file')

    return certificates, key


def get_checksum(data):
    return SHA.new(data).digest()


def get_hexchecksum(data):
    return SHA.new(data).hexdigest()


def get_signature(key, data):
    return PKCS1_v1_5.new(key).sign(SHA.new(data))


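# Builds the table of contents entries for all files in filedata (a dict
# mapping slash-separated paths to file contents) under `root`, and returns
# the zlib-compressed blobs in the same order. `offset` is the position of
# the first file's data in the heap that follows the table of contents.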
def compress_files(filedata, root, offset):
    compressed_data = []
    filedata = sorted(filedata.iteritems())
    directory_stack = [('', root)]
    file_id = 1
    for path, data in filedata:
        # Remove directories that are done
        while not path.startswith(directory_stack[-1][0]):
            directory_stack.pop()

        # Add new directories
        directory_path = directory_stack[-1][0]
        relpath = path[len(directory_path):]
        while '/' in relpath:
            name, relpath = relpath.split('/', 1)
            directory_path += name + '/'
            directory = {
                'id': file_id,
                'name': name,
                'type': 'directory',
                'mode': '0755',
                'children': [],
            }
            file_id += 1
            directory_stack[-1][1].append(directory)
            directory_stack.append((directory_path, directory['children']))

        # Add the actual file
        compressed = zlib.compress(data, 9)
        file = {
            'id': file_id,
            'name': relpath,
            'type': 'file',
            'mode': '0644',
            'checksum_uncompressed': get_hexchecksum(data),
            'size_uncompressed': len(data),
            'checksum_compressed': get_hexchecksum(compressed),
            'size_compressed': len(compressed),
            'offset': offset,
        }
        file_id += 1
        offset += len(compressed)
        directory_stack[-1][1].append(file)
        compressed_data.append(compressed)
    return compressed_data


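# Writes a signed XAR archive to archivepath. `contents` maps archive paths
# to file data; `keyfile` is a PEM file with the signing certificate chain
# and the RSA private key.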
def create(archivepath, contents, keyfile):
    certificates, key = read_certificates_and_key(keyfile)
    checksum_length = len(get_checksum(''))
    params = {
        'certificates': certificates,

        # Timestamp epoch starts at 2001-01-01T00:00:00.000Z
        'timestamp_numerical': time.time() - 978307200,
        'timestamp_iso': time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime()),

        'checksum': {
            'offset': 0,
            'size': checksum_length,
        },
        'signature': {
            'offset': checksum_length,
            'size': len(get_signature(key, '')),
        },
        'files': [],
    }

    offset = params['signature']['offset'] + params['signature']['size']
    compressed_data = compress_files(contents, params['files'], offset)

    template = getTemplate('xartoc.xml.tmpl', autoEscape=True)
    toc_uncompressed = template.render(params).encode('utf-8')
    toc_compressed = zlib.compress(toc_uncompressed, 9)

    with open(archivepath, 'wb') as file:
        # The file starts with a minimalistic header
        file.write(XAR_HEADER.pack(XAR_HEADER_MAGIC, XAR_HEADER.size,
                                   XAR_VERSION, len(toc_compressed),
                                   len(toc_uncompressed), XAR_CKSUM_SHA1))

        # It's followed by a compressed XML table of contents
        file.write(toc_compressed)

        # Now the actual data, all the offsets are in the table of contents
        file.write(get_checksum(toc_compressed))
        file.write(get_signature(key, toc_compressed))
        for blob in compressed_data:
            file.write(blob)
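For context, a minimal usage sketch, not part of the reviewed change: the module name, file names and key path below are made up. It assumes `contents` is a dict mapping slash-separated archive paths to raw file data and that the key file is a PEM file containing the signing certificate chain followed by the RSA private key.

# Hypothetical example, assuming this module is importable as xarfile.
from xarfile import create

# Archive paths mapped to raw file contents (all names are made up).
contents = {
    'extension.safariextension/Info.plist': open('Info.plist', 'rb').read(),
    'extension.safariextension/global.html': open('global.html', 'rb').read(),
}

# 'key.pem' would hold the certificate chain and the RSA private key.
create('extension.safariextz', contents, 'key.pem')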