OLD | NEW |
1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
4 | 4 |
5 import errno | 5 import errno |
| 6 import glob |
6 import io | 7 import io |
7 import json | 8 import json |
8 import os | 9 import os |
9 import re | 10 import re |
10 from StringIO import StringIO | 11 from StringIO import StringIO |
11 import struct | 12 import struct |
| 13 import subprocess |
12 import sys | 14 import sys |
13 import collections | |
14 import glob | |
15 | 15 |
16 from packager import (readMetadata, getDefaultFileName, getBuildVersion, | 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion, |
17 getTemplate, Files) | 17 getTemplate, Files) |
18 | 18 |
19 defaultLocale = 'en_US' | 19 defaultLocale = 'en_US' |
20 | 20 |
21 | 21 |
22 def getIgnoredFiles(params): | 22 def getIgnoredFiles(params): |
23 return {'store.description'} | 23 return {'store.description'} |
24 | 24 |
(...skipping 108 matching lines...)
133 # Normalize JSON structure | 133 # Normalize JSON structure |
134 licenseComment = re.compile(r'/\*.*?\*/', re.S) | 134 licenseComment = re.compile(r'/\*.*?\*/', re.S) |
135 data = json.loads(re.sub(licenseComment, '', manifest, 1)) | 135 data = json.loads(re.sub(licenseComment, '', manifest, 1)) |
136 if '_dummy' in data: | 136 if '_dummy' in data: |
137 del data['_dummy'] | 137 del data['_dummy'] |
138 manifest = json.dumps(data, sort_keys=True, indent=2) | 138 manifest = json.dumps(data, sort_keys=True, indent=2) |
139 | 139 |
140 return manifest.encode('utf-8') | 140 return manifest.encode('utf-8') |
141 | 141 |
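For context, a minimal worked example of the manifest normalization shown above; the sample manifest text is an illustrative assumption, not content from this repository:

import json
import re

# A leading /* ... */ license header is stripped (first match only), the
# '_dummy' placeholder key is dropped, and the result is re-serialized with
# sorted keys and 2-space indentation.
manifest = '''/* placeholder license header */
{
  "_dummy": true,
  "name": "__MSG_name__",
  "manifest_version": 2
}'''

licenseComment = re.compile(r'/\*.*?\*/', re.S)
data = json.loads(re.sub(licenseComment, '', manifest, 1))
data.pop('_dummy', None)
print(json.dumps(data, sort_keys=True, indent=2))
# {
#   "manifest_version": 2,
#   "name": "__MSG_name__"
# }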
142 | 142 |
143 def convertJS(params, files): | |
144 output_files = collections.OrderedDict() | |
145 args = {} | |
146 | |
147 for item in params['metadata'].items('convert_js'): | |
148 name, value = item | |
149 filename, arg = re.search(r'^(.*?)(?:\[(.*)\])?$', name).groups() | |
150 if arg is None: | |
151 output_files[filename] = (value.split(), item.source) | |
152 else: | |
153 args.setdefault(filename, {})[arg] = value | |
154 | |
155 template = getTemplate('modules.js.tmpl') | |
156 | |
157 for filename, (input_files, origin) in output_files.iteritems(): | |
158 if '/' in filename and not files.isIncluded(filename): | |
159 continue | |
160 | |
161 current_args = args.get(filename, {}) | |
162 current_args['autoload'] = [module for module in | |
163 current_args.get('autoload', '').split(',') | |
164 if module != ''] | |
165 | |
166 base_dir = os.path.dirname(origin) | |
167 modules = [] | |
168 | |
169 for input_filename in input_files: | |
170 module_name = os.path.splitext(os.path.basename(input_filename))[0] | |
171 prefix = os.path.basename(os.path.dirname(input_filename)) | |
172 if prefix != 'lib': | |
173 module_name = '{}_{}'.format(prefix, module_name) | |
174 with open(os.path.join(base_dir, input_filename), 'r') as file: | |
175 modules.append((module_name, file.read().decode('utf-8'))) | |
176 files.pop(input_filename, None) | |
177 | |
178 files[filename] = template.render( | |
179 args=current_args, | |
180 basename=params['metadata'].get('general', 'basename'), | |
181 modules=modules, | |
182 type=params['type'], | |
183 version=params['metadata'].get('general', 'version') | |
184 ).encode('utf-8') | |
185 | |
186 | |
187 def toJson(data): | 143 def toJson(data): |
188 return json.dumps( | 144 return json.dumps( |
189 data, ensure_ascii=False, sort_keys=True, | 145 data, ensure_ascii=False, sort_keys=True, |
190 indent=2, separators=(',', ': ') | 146 indent=2, separators=(',', ': ') |
191 ).encode('utf-8') + '\n' | 147 ).encode('utf-8') + '\n' |
192 | 148 |
193 | 149 |
| 150 def create_bundles(params, files): |
| 151 base_extension_path = params['baseDir'] |
| 152 info_templates = { |
| 153 'chrome': 'chromeInfo.js.tmpl', |
| 154 'edge': 'edgeInfo.js.tmpl', |
| 155 'gecko-webext': 'geckoInfo.js.tmpl' |
| 156 } |
| 157 |
| 158 # Historically we didn't use relative paths when requiring modules, so in |
| 159 # order for webpack to know where to find them we need to pass in a list of |
| 160 # resolve paths. Going forward we should always use relative paths; once we |
| 161 # do that consistently this can be removed. See issues 5760, 5761 and 5762. |
| 162 resolve_paths = [os.path.join(base_extension_path, dir, 'lib') |
| 163 for dir in ['', 'adblockpluscore', 'adblockplusui']] |
| 164 |
| 165 info_template = getTemplate(info_templates[params['type']]) |
| 166 info_module = info_template.render( |
| 167 basename=params['metadata'].get('general', 'basename'), |
| 168 version=params['metadata'].get('general', 'version') |
| 169 ).encode('utf-8') |
| 170 |
| 171 configuration = { |
| 172 'bundles': [], |
| 173 'extension_path': base_extension_path, |
| 174 'info_module': info_module, |
| 175 'resolve_paths': resolve_paths, |
| 176 } |
| 177 |
| 178 for item in params['metadata'].items('bundles'): |
| 179 name, value = item |
| 180 base_item_path = os.path.dirname(item.source) |
| 181 |
| 182 bundle_file = os.path.relpath(os.path.join(base_item_path, name), |
| 183 base_extension_path) |
| 184 entry_files = [os.path.join(base_item_path, module_path) |
| 185 for module_path in value.split()] |
| 186 configuration['bundles'].append({ |
| 187 'bundle_name': bundle_file, |
| 188 'entry_points': entry_files, |
| 189 }) |
| 190 |
| 191 cmd = ['node', os.path.join(os.path.dirname(__file__), 'webpack_runner.js')] |
| 192 process = subprocess.Popen(cmd, stdout=subprocess.PIPE, |
| 193 stdin=subprocess.PIPE) |
| 194 output = process.communicate(input=toJson(configuration))[0] |
| 195 if process.returncode != 0: |
| 196 raise subprocess.CalledProcessError(process.returncode, cmd=cmd) |
| 197 output = json.loads(output) |
| 198 |
| 199 # Clear the mapping for any files included in a bundle, to avoid them being |
| 200 # duplicated in the build. |
| 201 for to_ignore in output['included']: |
| 202 files.pop(to_ignore, None) |
| 203 |
| 204 for bundle in output['files']: |
| 205 files[bundle] = output['files'][bundle].encode('utf-8') |
| 206 |
| 207 |
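To illustrate the new mechanism, a hedged sketch of the data create_bundles() exchanges with webpack_runner.js; the section contents, paths and bundle names below are invented for illustration, only the key names match what the code above reads and writes:

# A hypothetical [bundles] metadata section (illustrative only):
#
#   [bundles]
#   lib/adblockplus.js = lib/main.js lib/filters.js
#
# For each entry, create_bundles() resolves the output name and entry points
# relative to the metadata file that defined them and sends a configuration
# like this to webpack_runner.js as JSON on stdin:
configuration = {
    'extension_path': '/path/to/adblockpluschrome',        # params['baseDir']
    'info_module': '/* rendered chromeInfo.js.tmpl */',
    'resolve_paths': ['/path/to/adblockpluschrome/lib',
                      '/path/to/adblockpluschrome/adblockpluscore/lib',
                      '/path/to/adblockpluschrome/adblockplusui/lib'],
    'bundles': [{
        'bundle_name': 'lib/adblockplus.js',
        'entry_points': ['/path/to/adblockpluschrome/lib/main.js',
                         '/path/to/adblockpluschrome/lib/filters.js'],
    }],
}

# The runner is expected to answer with JSON of this shape: 'included' lists
# the source files that were pulled into a bundle (they are then dropped from
# the build to avoid duplication), and 'files' maps each bundle name to its
# generated content.
expected_output = {
    'included': ['lib/main.js', 'lib/filters.js'],
    'files': {'lib/adblockplus.js': '/* generated bundle code */'},
}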
194 def import_locales(params, files): | 208 def import_locales(params, files): |
195 for item in params['metadata'].items('import_locales'): | 209 for item in params['metadata'].items('import_locales'): |
196 filename, keys = item | 210 filename, keys = item |
197 for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source), | 211 for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source), |
198 *filename.split('/'))): | 212 *filename.split('/'))): |
199 locale = sourceFile.split(os.path.sep)[-2] | 213 locale = sourceFile.split(os.path.sep)[-2] |
200 targetFile = os.path.join('_locales', locale, 'messages.json') | 214 targetFile = os.path.join('_locales', locale, 'messages.json') |
201 data = json.loads(files.get(targetFile, '{}').decode('utf-8')) | 215 data = json.loads(files.get(targetFile, '{}').decode('utf-8')) |
202 | 216 |
203 try: | 217 try: |
(...skipping 121 matching lines...)
325 'metadata': metadata, | 339 'metadata': metadata, |
326 } | 340 } |
327 | 341 |
328 mapped = metadata.items('mapping') if metadata.has_section('mapping') else [] | 342 mapped = metadata.items('mapping') if metadata.has_section('mapping') else [] |
329 files = Files(getPackageFiles(params), getIgnoredFiles(params), | 343 files = Files(getPackageFiles(params), getIgnoredFiles(params), |
330 process=lambda path, data: processFile(path, data, params)) | 344 process=lambda path, data: processFile(path, data, params)) |
331 | 345 |
332 files.readMappedFiles(mapped) | 346 files.readMappedFiles(mapped) |
333 files.read(baseDir, skip=[opt for opt, _ in mapped]) | 347 files.read(baseDir, skip=[opt for opt, _ in mapped]) |
334 | 348 |
335 if metadata.has_section('convert_js'): | 349 if metadata.has_section('bundles'): |
336 convertJS(params, files) | 350 create_bundles(params, files) |
337 | 351 |
338 if metadata.has_section('preprocess'): | 352 if metadata.has_section('preprocess'): |
339 files.preprocess( | 353 files.preprocess( |
340 [f for f, _ in metadata.items('preprocess')], | 354 [f for f, _ in metadata.items('preprocess')], |
341 {'needsExt': True} | 355 {'needsExt': True} |
342 ) | 356 ) |
343 | 357 |
344 if metadata.has_section('import_locales'): | 358 if metadata.has_section('import_locales'): |
345 import_locales(params, files) | 359 import_locales(params, files) |
346 | 360 |
(...skipping 12 matching lines...)
359 params, 'testIndex.html.tmpl', ('general', 'testScripts') | 373 params, 'testIndex.html.tmpl', ('general', 'testScripts') |
360 ) | 374 ) |
361 | 375 |
362 zipdata = files.zipToString() | 376 zipdata = files.zipToString() |
363 signature = None | 377 signature = None |
364 pubkey = None | 378 pubkey = None |
365 if keyFile != None: | 379 if keyFile != None: |
366 signature = signBinary(zipdata, keyFile) | 380 signature = signBinary(zipdata, keyFile) |
367 pubkey = getPublicKey(keyFile) | 381 pubkey = getPublicKey(keyFile) |
368 writePackage(outFile, pubkey, signature, zipdata) | 382 writePackage(outFile, pubkey, signature, zipdata) |
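writePackage() itself is defined in the portion of the file skipped above. As orientation only, a hedged sketch of a CRX2-style writer that such a call could map onto; this is an assumption about the packaging format, not the implementation under review:

import struct

def write_package_sketch(path, pubkey, signature, zipdata):
    # Sketch only: with no key the package is written as a plain zip;
    # otherwise a CRX2 header ("Cr24" magic, format version 2, key and
    # signature lengths, all little-endian) precedes the key, signature
    # and zip data.
    with open(path, 'wb') as file:
        if pubkey is not None and signature is not None:
            file.write(struct.pack('<4sIII', b'Cr24', 2,
                                   len(pubkey), len(signature)))
            file.write(pubkey)
            file.write(signature)
        file.write(zipdata)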