LEFT | RIGHT |
1 # coding: utf-8 | 1 # coding: utf-8 |
2 | 2 |
3 # This file is part of the Adblock Plus build tools, | 3 # This file is part of the Adblock Plus build tools, |
4 # Copyright (C) 2006-2013 Eyeo GmbH | 4 # Copyright (C) 2006-2013 Eyeo GmbH |
5 # | 5 # |
6 # Adblock Plus is free software: you can redistribute it and/or modify | 6 # Adblock Plus is free software: you can redistribute it and/or modify |
7 # it under the terms of the GNU General Public License version 3 as | 7 # it under the terms of the GNU General Public License version 3 as |
8 # published by the Free Software Foundation. | 8 # published by the Free Software Foundation. |
9 # | 9 # |
10 # Adblock Plus is distributed in the hope that it will be useful, | 10 # Adblock Plus is distributed in the hope that it will be useful, |
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of | 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of |
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
13 # GNU General Public License for more details. | 13 # GNU General Public License for more details. |
14 # | 14 # |
15 # You should have received a copy of the GNU General Public License | 15 # You should have received a copy of the GNU General Public License |
16 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. | 16 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. |
17 | 17 |
18 import re, json | 18 import os |
| 19 import re |
| 20 import json |
| 21 import ConfigParser |
19 from urlparse import urlparse | 22 from urlparse import urlparse |
20 from collections import OrderedDict | |
21 | 23 |
22 from packager import readMetadata, getDefaultFileName, getBuildVersion, getTemplate, Files | 24 from packager import readMetadata, getDefaultFileName, getBuildVersion, getTemplate, Files |
23 from buildtools.packagerChrome import convertJS, importGeckoLocales, getIgnoredFiles, getPackageFiles, ImageConverter | 25 from packagerChrome import convertJS, importGeckoLocales, getIgnoredFiles, getPackageFiles, defaultLocale |
24 | 26 |
25 def createPlist(params, files): | 27 def processFile(path, data, params): |
26 template = getTemplate('Info.plist.tmpl') | 28 return data |
| 29 |
| 30 def createManifest(params, files): |
| 31 template = getTemplate('Info.plist.tmpl', autoEscape=True) |
27 metadata = params['metadata'] | 32 metadata = params['metadata'] |
28 catalog = json.loads(files['_locales/en_US/messages.json']) | 33 catalog = json.loads(files['_locales/%s/messages.json' % defaultLocale]) |
29 | 34 |
30 def toxml(val, indent=0): | 35 def parse_section(section, depth=1): |
31 if isinstance(val, bool): | 36 result = {} |
32 return '<true/>' if val else '<false/>' | |
33 if isinstance(val, (int, long)): | |
34 return '<real>%d</real>' % val | |
35 if isinstance(val, basestring): | |
36 return '<string>%s</string>' % val | |
37 | |
38 def parse_section(section, levels=1): | |
39 rv = OrderedDict() | |
40 | 37 |
41 if not metadata.has_section(section): | 38 if not metadata.has_section(section): |
42 return rv | 39 return result |
43 | 40 |
44 for opt in metadata.options(section): | 41 for opt in metadata.options(section): |
45 bits = opt.split('_', levels) | 42 bits = opt.split('_', depth) |
46 key = bits.pop(-1).replace('_', ' ').title() | 43 key = bits.pop().replace('_', ' ').title() |
47 d = rv | 44 val = metadata.get(section, opt) |
48 | 45 |
49 for x in bits: | 46 try: |
| 47 val = int(val) |
| 48 except ValueError: |
50 try: | 49 try: |
51 d = d[x] | 50 val = float(val) |
52 except KeyError: | 51 except ValueError: |
53 d[x] = d = OrderedDict() | 52 pass |
54 | 53 |
55 d[key] = metadata.get(section, opt) | 54 reduce(lambda d, x: d.setdefault(x, {}), bits, result)[key] = val |
56 | 55 |
57 return rv | 56 return result |
| 57 |
| 58 def get_optional(*args): |
| 59 try: |
| 60 return metadata.get(*args) |
| 61 except ConfigParser.Error: |
| 62 return None |
58 | 63 |
59 allowedDomains = set() | 64 allowedDomains = set() |
60 allowAllDomains = False | 65 allowAllDomains = False |
61 allowSecurePages = False | 66 allowSecurePages = False |
62 | 67 |
63 for perm in re.split(r'\s+', metadata.get('general', 'permissions')): | 68 for perm in metadata.get('general', 'permissions').split(): |
64 if perm == '<all_urls>': | 69 if perm == '<all_urls>': |
65 allowAllDomains = True | 70 allowAllDomains = True |
66 allowSecurePages = True | 71 allowSecurePages = True |
67 continue | 72 continue |
68 | 73 |
69 url = urlparse(perm) | 74 url = urlparse(perm) |
70 | 75 |
71 if url.scheme == 'https': | 76 if url.scheme == 'https': |
72 allowSecurePages = True | 77 allowSecurePages = True |
73 elif url.scheme != 'http': | 78 elif url.scheme != 'http': |
74 continue | 79 continue |
75 | 80 |
76 if '*' in url.hostname: | 81 if '*' in url.hostname: |
77 allowAllDomains = True | 82 allowAllDomains = True |
78 continue | 83 continue |
79 | 84 |
80 allowedDomains.add(url.hostname) | 85 allowedDomains.add(url.hostname) |
81 | 86 |
82 menus = parse_section('menus', 2) | |
83 toolbarItems = parse_section('toolbar_items') | |
84 | |
85 return template.render( | 87 return template.render( |
86 author=metadata.get('general', 'author'), | 88 basename=metadata.get('general', 'basename'), |
87 version=params['version'], | 89 version=params['version'], |
| 90 shortVersion=metadata.get('general', 'version'), |
| 91 releaseBuild=params['releaseBuild'], |
88 name=catalog['name']['message'], | 92 name=catalog['name']['message'], |
89 description=catalog['description']['message'], | 93 description=catalog['description_safari']['message'], |
90 website=metadata.get('general', 'website'), | 94 author=get_optional('general', 'author'), |
91 identifier=metadata.get('general', 'identifier'), | 95 homepage=get_optional('general', 'homepage'), |
| 96 updateURL=get_optional('general', 'updateURL'), |
92 allowedDomains=allowedDomains, | 97 allowedDomains=allowedDomains, |
93 allowAllDomains=allowAllDomains, | 98 allowAllDomains=allowAllDomains, |
94 allowSecurePages=allowSecurePages, | 99 allowSecurePages=allowSecurePages, |
95 contentScripts={ | 100 startScripts=(get_optional('contentScripts', 'document_start') or '').split(), |
96 'start': metadata.get('contentScripts', 'document_start').split(), | 101 endScripts=(get_optional('contentScripts', 'document_end') or '').split(), |
97 'end': metadata.get('contentScripts', 'document_end' ).split(), | |
98 }, | |
99 menus=parse_section('menus', 2), | 102 menus=parse_section('menus', 2), |
100 toolbarItems=parse_section('toolbar_items'), | 103 toolbarItems=parse_section('toolbar_items'), |
101 popovers=parse_section('popovers'), | 104 popovers=parse_section('popovers') |
102 toxml=toxml | |
103 ).encode('utf-8') | 105 ).encode('utf-8') |
104 | 106 |
105 def createBackgroundPage(params): | 107 def createBackgroundPage(params): |
106 template = getTemplate('background.html.tmpl') | 108 template = getTemplate('background.html.tmpl', autoEscape=True) |
107 return template.render( | 109 return template.render( |
108 backgroundScripts=re.split(r'\s+', params['metadata'].get( | 110 backgroundScripts=params['metadata'].get( |
109 'general', 'backgroundScripts' | 111 'general', 'backgroundScripts' |
110 )) | 112 ).split() |
111 ).encode('utf-8') | 113 ).encode('utf-8') |
112 | 114 |
113 def createInfoModule(params): | 115 def createInfoModule(params): |
114 template = getTemplate('safariInfo.js.tmpl') | 116 template = getTemplate('safariInfo.js.tmpl') |
115 return template.render(params).encode('utf-8'); | 117 return template.render(params).encode('utf-8') |
116 | 118 |
117 def createBuild(baseDir, type, outFile=None, buildNum=None, releaseBuild=False): | 119 def fixAbsoluteUrls(files): |
| 120 for filename, content in files.iteritems(): |
| 121 if os.path.splitext(filename)[1].lower() == '.html': |
| 122 files[filename] = re.sub( |
| 123 r'(<[^<>]*?\b(?:href|src)\s*=\s*["\']?)\/+', |
| 124 r'\1' + '/'.join(['..'] * filename.count('/') + ['']), |
| 125 content, re.S | re.I |
| 126 ) |
| 127 |
| 128 def createSignedXarArchive(outFile, files, keyFile): |
| 129 import subprocess |
| 130 import tempfile |
| 131 import shutil |
| 132 import M2Crypto |
| 133 |
| 134 # write files to a temporary directory and create a xar archive |
| 135 dirname = tempfile.mkdtemp() |
| 136 try: |
| 137 for filename, contents in files.iteritems(): |
| 138 path = os.path.join(dirname, filename) |
| 139 |
| 140 try: |
| 141 os.makedirs(os.path.dirname(path)) |
| 142 except OSError: |
| 143 pass |
| 144 |
| 145 with open(path, 'wb') as file: |
| 146 file.write(contents) |
| 147 |
| 148 subprocess.check_output( |
| 149 ['xar', '-czf', os.path.abspath(outFile), '--distribution'] + os.listdir(dirname), |
| 150 cwd=dirname |
| 151 ) |
| 152 finally: |
| 153 shutil.rmtree(dirname) |
| 154 |
| 155 certificate_filenames = [] |
| 156 try: |
| 157 # load key and certificates from the all-in-one key file |
| 158 # and write each certificate in DER format to a separate |
| 159 # temporary file, so that they can be passed to xar |
| 160 bio = M2Crypto.BIO.openfile(keyFile) |
| 161 try: |
| 162 key = M2Crypto.RSA.load_key_bio(bio) |
| 163 |
| 164 bio.reset() |
| 165 while True: |
| 166 try: |
| 167 cert = M2Crypto.X509.load_cert_bio(bio) |
| 168 except M2Crypto.X509.X509Error: |
| 169 break |
| 170 |
| 171 fd, filename = tempfile.mkstemp() |
| 172 try: |
| 173 certificate_filenames.append(filename) |
| 174 os.write(fd, cert.as_der()) |
| 175 finally: |
| 176 os.close(fd) |
| 177 finally: |
| 178 bio.close() |
| 179 |
| 180 # add certificates and placeholder signature |
| 181 # to the xar archive, and get data to sign |
| 182 fd, digestinfo_filename = tempfile.mkstemp() |
| 183 os.close(fd) |
| 184 try: |
| 185 subprocess.check_call( |
| 186 [ |
| 187 'xar', '--sign', '-f', outFile, |
| 188 '--digestinfo-to-sign', digestinfo_filename, |
| 189 '--sig-size', str(len(key.private_encrypt('', M2Crypto.RSA.pkcs1_padding))) |
| 190 ] + [ |
| 191 arg for cert in certificate_filenames for arg in ('--cert-loc', cert) |
| 192 ] |
| 193 ) |
| 194 |
| 195 with open(digestinfo_filename, 'rb') as file: |
| 196 digestinfo = file.read() |
| 197 finally: |
| 198 os.unlink(digestinfo_filename) |
| 199 finally: |
| 200 for filename in certificate_filenames: |
| 201 os.unlink(filename) |
| 202 |
| 203 # sign data and inject signature into xar archive |
| 204 fd, signature_filename = tempfile.mkstemp() |
| 205 try: |
| 206 try: |
| 207 os.write(fd, key.private_encrypt( |
| 208 digestinfo, |
| 209 M2Crypto.RSA.pkcs1_padding |
| 210 )) |
| 211 finally: |
| 212 os.close(fd) |
| 213 |
| 214 subprocess.check_call(['xar', '--inject-sig', signature_filename, '-f', outFile]) |
| 215 finally: |
| 216 os.unlink(signature_filename) |
| 217 |
| 218 def createBuild(baseDir, type, outFile=None, buildNum=None, releaseBuild=False, keyFile=None): |
118 metadata = readMetadata(baseDir, type) | 219 metadata = readMetadata(baseDir, type) |
119 version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum) | 220 version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum) |
120 | 221 |
121 if outFile == None: | 222 if not outFile: |
122 outFile = getDefaultFileName(baseDir, metadata, version, 'zip') | 223 outFile = getDefaultFileName(baseDir, metadata, version, 'safariextz' if keyFile else 'zip') |
123 | 224 |
124 params = { | 225 params = { |
125 'type': type, | 226 'type': type, |
126 'baseDir': baseDir, | 227 'baseDir': baseDir, |
127 'releaseBuild': releaseBuild, | 228 'releaseBuild': releaseBuild, |
128 'version': version, | 229 'version': version, |
129 'devenv': False, | 230 'devenv': False, |
130 'metadata': metadata, | 231 'metadata': metadata, |
131 } | 232 } |
132 | 233 |
133 files = Files(getPackageFiles(params), getIgnoredFiles(params), | 234 files = Files(getPackageFiles(params), getIgnoredFiles(params), |
134 process=lambda path, data: data) | 235 process=lambda path, data: processFile(path, data, params)) |
135 if metadata.has_section('mapping'): | 236 if metadata.has_section('mapping'): |
136 files.readMappedFiles(metadata.items('mapping')) | 237 files.readMappedFiles(metadata.items('mapping')) |
137 files.read(baseDir) | 238 files.read(baseDir) |
138 | 239 |
139 if metadata.has_section('convert_js'): | 240 if metadata.has_section('convert_js'): |
140 convertJS(params, files) | 241 convertJS(params, files) |
141 | 242 |
142 if metadata.has_section('convert_img'): | 243 if metadata.has_section('convert_img'): |
143 ImageConverter().convert(params, files) | 244 from imageConversion import convertImages |
| 245 convertImages(params, files) |
| 246 |
| 247 if metadata.has_section('preprocess'): |
| 248 files.preprocess( |
| 249 [f for f, _ in metadata.items('preprocess')], |
| 250 {'needsExt': True} |
| 251 ) |
144 | 252 |
145 if metadata.has_section('import_locales'): | 253 if metadata.has_section('import_locales'): |
146 importGeckoLocales(params, files) | 254 importGeckoLocales(params, files) |
147 | 255 |
148 files['lib/info.js'] = createInfoModule(params) | 256 files['lib/info.js'] = createInfoModule(params) |
149 files['background.html'] = createBackgroundPage(params) | 257 files['background.html'] = createBackgroundPage(params) |
150 files['Info.plist'] = createPlist(params, files) | 258 files['Info.plist'] = createManifest(params, files) |
151 | 259 |
152 with open(outFile, 'wb') as f: | 260 fixAbsoluteUrls(files) |
153 f.write(files.zipToString()) | 261 |
| 262 dirname = metadata.get('general', 'basename') + '.safariextension' |
| 263 for filename in files.keys(): |
| 264 files[os.path.join(dirname, filename)] = files.pop(filename) |
| 265 |
| 266 if keyFile: |
| 267 createSignedXarArchive(outFile, files, keyFile) |
| 268 else: |
| 269 files.zip(outFile) |
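For readers skimming the new parse_section on the right-hand side: the single line reduce(lambda d, x: d.setdefault(x, {}), bits, result)[key] = val replaces the old explicit loop over nested OrderedDicts with plain dicts created on demand. A minimal standalone sketch of that idiom follows, using a hypothetical nest() helper that is not part of the patch (the int/float coercion of values is omitted here):

# Illustrative only: nest() mirrors the nesting idiom of the new parse_section.
from functools import reduce  # a builtin in Python 2, imported for clarity

def nest(result, option, value, depth=1):
  # Split off at most 'depth' leading segments; the last segment becomes the key.
  bits = option.split('_', depth)
  key = bits.pop().replace('_', ' ').title()
  # Walk (and create on demand) one nested dict per remaining segment, then assign.
  reduce(lambda d, x: d.setdefault(x, {}), bits, result)[key] = value
  return result

settings = {}
nest(settings, 'toolbar_label', 'Adblock Plus', depth=1)
nest(settings, 'item1_menu_title', 'Options', depth=2)
# settings == {'toolbar': {'Label': 'Adblock Plus'},
#              'item1': {'menu': {'Title': 'Options'}}}  (key order may vary)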
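The other substantial addition is the signing path: createBuild now accepts a keyFile argument and, when one is given, defaults the output name to a .safariextz archive and hands the packaged files to createSignedXarArchive, which shells out to xar and signs the digest with M2Crypto. A minimal usage sketch, assuming the module is importable as buildtools.packagerSafari, that the metadata type is 'safari', and using a hypothetical key file path:

# Usage sketch only; the import path, the 'safari' type name and the key file
# location are assumptions, not taken from the patch itself.
from buildtools.packagerSafari import createBuild

# Unsigned development build: falls back to a plain zip archive.
createBuild('.', type='safari', releaseBuild=False)

# Signed release build with an all-in-one PEM file (private key plus certificate
# chain): produces a .safariextz xar archive; requires the xar tool on PATH and
# the M2Crypto module installed.
createBuild('.', type='safari', releaseBuild=True,
            keyFile='safari_developer_key.pem')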