Issue 29549786:
Issue 5535 - Replace our module system with webpack  (Closed)
| LEFT | RIGHT | 
|---|---|
| 1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public | 
| 2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 
| 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 
| 4 | 4 | 
| 5 import errno | 5 import errno | 
| 6 import glob | |
| 6 import io | 7 import io | 
| 7 import json | 8 import json | 
| 8 import os | 9 import os | 
| 9 import re | 10 import re | 
| 10 from StringIO import StringIO | 11 from StringIO import StringIO | 
| 11 import struct | 12 import struct | 
| 12 import subprocess | 13 import subprocess | 
| 13 import sys | 14 import sys | 
| 14 | 15 | 
| 15 from ensure_dependencies import read_deps | |
| 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion, | 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion, | 
| 17 getTemplate, Files) | 17 getTemplate, Files) | 
| 18 | 18 | 
| 19 defaultLocale = 'en_US' | 19 defaultLocale = 'en_US' | 
| 20 | 20 | 
| 21 | 21 | 
| 22 def getIgnoredFiles(params): | 22 def getIgnoredFiles(params): | 
| 23 return {'store.description'} | 23 return {'store.description'} | 
| 24 | 24 | 
| 25 | 25 | 
| (...skipping 107 matching lines...) | | 
| 133 # Normalize JSON structure | 133 # Normalize JSON structure | 
| 134 licenseComment = re.compile(r'/\*.*?\*/', re.S) | 134 licenseComment = re.compile(r'/\*.*?\*/', re.S) | 
| 135 data = json.loads(re.sub(licenseComment, '', manifest, 1)) | 135 data = json.loads(re.sub(licenseComment, '', manifest, 1)) | 
| 136 if '_dummy' in data: | 136 if '_dummy' in data: | 
| 137 del data['_dummy'] | 137 del data['_dummy'] | 
| 138 manifest = json.dumps(data, sort_keys=True, indent=2) | 138 manifest = json.dumps(data, sort_keys=True, indent=2) | 
| 139 | 139 | 
| 140 return manifest.encode('utf-8') | 140 return manifest.encode('utf-8') | 
| 141 | 141 | 
| 142 | 142 | 
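The block above normalizes the templated manifest: the leading /* ... */ MPL comment is stripped, a `_dummy` entry is dropped if present, and the result is re-serialized with sorted keys. A minimal sketch of that normalization on a hypothetical template output (the manifest content here is illustrative, not taken from this review):

```python
import json
import re

# Hypothetical templated manifest, not from this review:
manifest = """/* This Source Code Form is subject to the terms of the MPL 2.0. */
{
  "manifest_version": 2,
  "name": "__MSG_name__",
  "_dummy": false
}"""

licenseComment = re.compile(r'/\*.*?\*/', re.S)
data = json.loads(re.sub(licenseComment, '', manifest, 1))
if '_dummy' in data:
    del data['_dummy']
print(json.dumps(data, sort_keys=True, indent=2))
# {
#   "manifest_version": 2,
#   "name": "__MSG_name__"
# }
```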
| 143 def toJson(data): | |
| 144 return json.dumps( | |
| 145 data, ensure_ascii=False, sort_keys=True, | |
| 146 indent=2, separators=(',', ': ') | |
| 147 ).encode('utf-8') + '\n' | |
| 148 | |
| 149 | |
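The new `toJson` helper is shared by the locale and bundling code below; its sorted keys, fixed separators and trailing newline keep generated files byte-for-byte stable between builds. A quick check of those properties, assuming the helper as defined above (Python 2, as in the rest of the file):

```python
data = {'b': {'message': 'Beta'}, 'a': {'message': u'\u00e9'}}
out = toJson(data)
assert out.endswith('\n')                    # trailing newline
assert out.index('"a"') < out.index('"b"')   # keys sorted -> stable diffs
assert toJson(data) == out                   # deterministic output
```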
| 143 def create_bundles(params, files): | 150 def create_bundles(params, files): | 
| 144 base_extension_path = params['baseDir'] | 151 base_extension_path = params['baseDir'] | 
| 145 info_templates = { | 152 info_templates = { | 
| 146 'chrome': 'chromeInfo.js.tmpl', | 153 'chrome': 'chromeInfo.js.tmpl', | 
| 147 'edge': 'edgeInfo.js.tmpl', | 154 'edge': 'edgeInfo.js.tmpl', | 
| 148 'gecko-webext': 'geckoInfo.js.tmpl' | 155 'gecko-webext': 'geckoInfo.js.tmpl' | 
| 149 } | 156 } | 
| 150 | 157 | 
| 151 # Historically we didn't use relative paths when requiring modules, so in | 158 # Historically we didn't use relative paths when requiring modules, so in | 
| 152 # order for webpack to know where to find them we need to pass in a list of | 159 # order for webpack to know where to find them we need to pass in a list of | 
| 153 # resolve paths. Going forward we should always use relative paths, once we | 160 # resolve paths. Going forward we should always use relative paths, once we | 
| 154 # do that consistently this can be removed. See issues 5760, 5761 and 5762. | 161 # do that consistently this can be removed. See issues 5760, 5761 and 5762. | 
| 155 resolve_paths = ' '.join( | 162 resolve_paths = [os.path.join(base_extension_path, dir, 'lib') | 
| 156 [os.path.join(base_extension_path, dir, 'lib') | 163 for dir in ['', 'adblockpluscore', 'adblockplusui']] | 
| 157 for dir in ['', 'adblockpluscore', 'adblockplusui']] | 164 | 
| 158 ) | 165 info_template = getTemplate(info_templates[params['type']]) | 
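On the right-hand side `resolve_paths` becomes a list instead of a space-joined string, which is what lets paths containing spaces survive the trip to the runner. For a hypothetical checkout at /src/adblockpluschrome the list works out to:

```python
import os

base_extension_path = '/src/adblockpluschrome'   # hypothetical checkout path
resolve_paths = [os.path.join(base_extension_path, dir, 'lib')
                 for dir in ['', 'adblockpluscore', 'adblockplusui']]
# ['/src/adblockpluschrome/lib',
#  '/src/adblockpluschrome/adblockpluscore/lib',
#  '/src/adblockpluschrome/adblockplusui/lib']
```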
| 159 | 166 info_module = info_template.render( | 
| 160 template = getTemplate(info_templates[params['type']]) | |

Wladimir Palant (2017/10/04 10:38:06): Nit: this variable is better named info_template.
kzar (2017/10/04 13:13:25): Done.
| 161 info_module = template.render( | |
| 162 basename=params['metadata'].get('general', 'basename'), | 167 basename=params['metadata'].get('general', 'basename'), | 
| 163 version=params['metadata'].get('general', 'version') | 168 version=params['metadata'].get('general', 'version') | 
| 164 ).encode('utf-8') | 169 ).encode('utf-8') | 
| 170 | |
| 171 configuration = { | |
| 172 'bundles': [], | |
| 173 'extension_path': base_extension_path, | |
| 174 'info_module': info_module, | |
| 175 'resolve_paths': resolve_paths, | |
| 176 } | |
| 165 | 177 | 
| 166 for item in params['metadata'].items('bundles'): | 178 for item in params['metadata'].items('bundles'): | 
| 167 name, value = item | 179 name, value = item | 
| 168 base_item_path = os.path.dirname(item.source) | 180 base_item_path = os.path.dirname(item.source) | 
| 169 | 181 | 
| 170 bundle_file = os.path.relpath(os.path.join(base_item_path, name), | 182 bundle_file = os.path.relpath(os.path.join(base_item_path, name), | 
| 171 base_extension_path) | 183 base_extension_path) | 
| 172 entry_files = [os.path.join(base_item_path, module_path) | 184 entry_files = [os.path.join(base_item_path, module_path) | 
| 173 for module_path in value.split()] | 185 for module_path in value.split()] | 
| 174 files[bundle_file] = subprocess.check_output( | 186 configuration['bundles'].append({ | 
| 175 ['node', 'webpack_runner.js'], | 187 'bundle_name': bundle_file, | 
| 176 cwd=os.path.dirname(__file__), | 188 'entry_points': entry_files, |

Wladimir Palant (2017/10/04 10:38:06): I'd prefer an absolute path to webpack_runner.js.
kzar (2017/10/04 13:13:24): Done.
| 177 env={ | 189 }) | 
| 178 'EXTENSION_PATH': base_extension_path, | 190 | 
| 179 'ENTRY_POINTS': ' '.join(entry_files), | 191 cmd = ['node', os.path.join(os.path.dirname(__file__), 'webpack_runner.js')] |

Wladimir Palant (2017/10/04 10:38:06): This will break for paths containing spaces.
kzar (2017/10/04 13:13:24): Whoops, good point.
| 180 'BUNDLE_NAME': bundle_file, | 192 process = subprocess.Popen(cmd, stdout=subprocess.PIPE, | 
| 181 'RESOLVE_PATHS': resolve_paths, | 193 stdin=subprocess.PIPE) | 
| 182 'INFO_MODULE': info_module, | 194 output = process.communicate(input=toJson(configuration))[0] |

Wladimir Palant (2017/10/04 10:38:06): Why do we still need to pass data via environment variables?
kzar (2017/10/04 13:13:24): Done.
| 183 'PATH': os.environ['PATH'] | 195 if process.returncode != 0: | 
| 184 } | 196 raise subprocess.CalledProcessError(process.returncode, cmd=cmd) | 
| 185 ) | 197 | 
| 186 | 198 bundles = json.loads(output) | 
| 187 | 199 for bundle in bundles: | 
| 188 def toJson(data): | 200 files[bundle] = bundles[bundle].encode('utf-8') | 
| 189 return json.dumps( | |
| 190 data, ensure_ascii=False, sort_keys=True, | |
| 191 indent=2, separators=(',', ': ') | |
| 192 ).encode('utf-8') + '\n' | |
| 193 | |
| 194 | |
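The net effect of the create_bundles changes above: instead of one webpack_runner.js invocation per bundle with data smuggled through environment variables, the script now makes a single call, writes the whole configuration as JSON to the runner's stdin, and reads back a JSON object mapping bundle names to bundled source on stdout (addressing the review comments above). A minimal sketch of that exchange, using a hypothetical Python stand-in for webpack_runner.js so only the shape of the protocol is shown:

```python
import json
import subprocess
import sys

# Hypothetical stand-in for webpack_runner.js: it reads the configuration
# from stdin and answers with one dummy bundle per requested entry.
fake_runner = (
    "import json, sys\n"
    "config = json.load(sys.stdin)\n"
    "bundles = {b['bundle_name']: '/* bundled code */'\n"
    "           for b in config['bundles']}\n"
    "sys.stdout.write(json.dumps(bundles))\n"
)

configuration = {
    'bundles': [{'bundle_name': 'lib/adblockplus.js',     # hypothetical names
                 'entry_points': ['lib/main.js']}],
    'extension_path': '/src/adblockpluschrome',
    'info_module': 'exports.addonName = "example";',
    'resolve_paths': ['/src/adblockpluschrome/lib'],
}

process = subprocess.Popen([sys.executable, '-c', fake_runner],
                           stdin=subprocess.PIPE, stdout=subprocess.PIPE)
output = process.communicate(input=json.dumps(configuration).encode('utf-8'))[0]
print(json.loads(output))  # {u'lib/adblockplus.js': u'/* bundled code */'}
```

The real runner is Node, of course; the stand-in only mirrors the stdin/stdout contract the Python side relies on.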
| 195 def import_string_webext(data, key, source): | |
| 196 """Import a single translation from the source dictionary into data""" | |
| 197 data[key] = source | |
| 198 | |
| 199 | |
| 200 def import_string_gecko(data, key, value): | |
| 201 """Import Gecko-style locales into data. | |
| 202 | |
| 203 Only sets {'message': value} in the data-dictionary, after stripping | |
| 204 undesired Gecko-style access keys. | |
| 205 """ | |
| 206 match = re.search(r'^(.*?)\s*\(&.\)$', value) | |
| 207 if match: | |
| 208 value = match.group(1) | |
| 209 else: | |
| 210 index = value.find('&') | |
| 211 if index >= 0: | |
| 212 value = value[0:index] + value[index + 1:] | |
| 213 | |
| 214 data[key] = {'message': value} | |
| 215 | 201 | 
| 216 | 202 | 
| 217 def import_locales(params, files): | 203 def import_locales(params, files): | 
| 218 import localeTools | 204 for item in params['metadata'].items('import_locales'): | 
| 219 | 205 filename, keys = item | 
| 220 # FIXME: localeTools doesn't use real Chrome locales, it uses dash as | 206 for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source), | 
| 221 # separator instead. | 207 *filename.split('/'))): | 
| 222 convert_locale_code = lambda code: code.replace('-', '_') | 208 locale = sourceFile.split(os.path.sep)[-2] | 
| 223 | 209 targetFile = os.path.join('_locales', locale, 'messages.json') | 
| 224 # We need to map Chrome locales to Gecko locales. Start by mapping Chrome | 210 data = json.loads(files.get(targetFile, '{}').decode('utf-8')) | 
| 225 # locales to themselves, merely with the dash as separator. | |
| 226 locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales} | |
| 227 | |
| 228 # Convert values to Crowdin locales first (use Chrome => Crowdin mapping). | |
| 229 for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems(): | |
| 230 locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale | |
| 231 | |
| 232 # Now convert values to Gecko locales (use Gecko => Crowdin mapping). | |
| 233 reverse_mapping = {v: k for k, v in locale_mapping.iteritems()} | |
| 234 for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems(): | |
| 235 if crowdin_locale in reverse_mapping: | |
| 236 locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale | |
| 237 | |
| 238 for target, source in locale_mapping.iteritems(): | |
| 239 targetFile = '_locales/%s/messages.json' % target | |
| 240 if not targetFile in files: | |
| 241 continue | |
| 242 | |
| 243 for item in params['metadata'].items('import_locales'): | |
| 244 fileName, keys = item | |
| 245 parts = map(lambda n: source if n == '*' else n, fileName.split('/')) | |
| 246 sourceFile = os.path.join(os.path.dirname(item.source), *parts) | |
| 247 incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete') | |
| 248 if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker): | |
| 249 continue | |
| 250 | |
| 251 data = json.loads(files[targetFile].decode('utf-8')) | |
| 252 | 211 | 
| 253 try: | 212 try: | 
| 254 # The WebExtensions (.json) and Gecko format provide | 213 with io.open(sourceFile, 'r', encoding='utf-8') as handle: | 
| 255 # translations differently and/or provide additional | 214 sourceData = json.load(handle) | 
| 256 # information like e.g. "placeholders". We want to adhere to | |
| 257 # that and preserve the additional info. | |
| 258 if sourceFile.endswith('.json'): | |
| 259 with io.open(sourceFile, 'r', encoding='utf-8') as handle: | |
| 260 sourceData = json.load(handle) | |
| 261 import_string = import_string_webext | |
| 262 else: | |
| 263 sourceData = localeTools.readFile(sourceFile) | |
| 264 import_string = import_string_gecko | |
| 265 | 215 | 
| 266 # Resolve wildcard imports | 216 # Resolve wildcard imports | 
| 267 if keys == '*' or keys == '=*': | 217 if keys == '*': | 
| 268 importList = sourceData.keys() | 218 importList = sourceData.keys() | 
| 269 importList = filter(lambda k: not k.startswith('_'), importList) | 219 importList = filter(lambda k: not k.startswith('_'), importList) |
| 270 if keys == '=*': | |
| 271 importList = map(lambda k: '=' + k, importList) | |
| 272 keys = ' '.join(importList) | 220 keys = ' '.join(importList) | 
| 273 | 221 | 
| 274 for stringID in keys.split(): | 222 for stringID in keys.split(): | 
| 275 noMangling = False | |
| 276 if stringID.startswith('='): | |
| 277 stringID = stringID[1:] | |
| 278 noMangling = True | |
| 279 | |
| 280 if stringID in sourceData: | 223 if stringID in sourceData: | 
| 281 if noMangling: | 224 if stringID in data: | 
| 282 key = re.sub(r'\W', '_', stringID) | 225 print ('Warning: locale string {} defined multiple' | 
| 283 else: | 226 ' times').format(stringID) | 
| 283 else: | |
| 284 key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID) | 227 |
| 285 if key in data: | 228 data[stringID] = sourceData[stringID] | 
| 286 print 'Warning: locale string %s defined multiple times' % key | |
| 287 | |
| 288 import_string(data, key, sourceData[stringID]) | |
| 289 except Exception as e: | 229 except Exception as e: | 
| 290 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e) | 230 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e) |
| 291 | 231 | 
| 292 files[targetFile] = toJson(data) | 232 files[targetFile] = toJson(data) | 
| 293 | 233 | 
| 294 | 234 | 
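The rewritten import_locales loop above globs each configured source pattern, takes the locale from the second-to-last path component, and merges the imported strings into the matching `_locales/<locale>/messages.json`. A small sketch of how one hypothetical `import_locales` entry would resolve (the pattern and paths are illustrative):

```python
import glob
import os

# Hypothetical metadata entry: "<pattern> = <keys>", where keys == '*'
# means every string whose name does not start with an underscore.
filename, keys = 'adblockplusui/locale/*/desc.json', '*'
item_dir = '/src/adblockpluschrome'   # directory of the metadata file

for sourceFile in glob.glob(os.path.join(item_dir, *filename.split('/'))):
    # e.g. /src/adblockpluschrome/adblockplusui/locale/de/desc.json
    locale = sourceFile.split(os.path.sep)[-2]           # -> 'de'
    targetFile = os.path.join('_locales', locale, 'messages.json')
    print('%s -> %s' % (sourceFile, targetFile))
```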
| 295 def truncate(text, length_limit): | 235 def truncate(text, length_limit): | 
| 296 if len(text) <= length_limit: | 236 if len(text) <= length_limit: | 
| 297 return text | 237 return text | 
| 298 return text[:length_limit - 1].rstrip() + u'\u2026' | 238 return text[:length_limit - 1].rstrip() + u'\u2026' | 
| 299 | 239 | 
| 300 | 240 | 
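truncate implements the length limits applied below: anything longer than the limit is cut to length_limit - 1 characters, trailing whitespace is stripped, and an ellipsis is appended. For example:

```python
print(truncate(u'Adblock Plus - free ad blocker', 12))
# -> u'Adblock Plu\u2026' (12 characters including the ellipsis)
```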
| 301 def fixTranslationsForCWS(files): | 241 def fix_translations_for_chrome(files): | 
| 302 # Chrome Web Store requires messages used in manifest.json to be present in | |
| 303 # all languages. It also enforces length limits for extension names and | |
| 304 # descriptions. | |
| 305 defaults = {} | 242 defaults = {} | 
| 306 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) | 243 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) | 
| 307 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): | 244 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): | 
| 308 name = match.group(1) | 245 name = match.group(1) | 
| 309 defaults[name] = data[name] | 246 defaults[name] = data[name] | 
| 310 | 247 | 
| 311 limits = {} | 248 limits = {} | 
| 312 manifest = json.loads(files['manifest.json']) | 249 manifest = json.loads(files['manifest.json']) | 
| 313 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): | 250 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): | 
| 314 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) | 251 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) | 
| 315 if match: | 252 if match: | 
| 316 limits[match.group(1)] = limit | 253 limits[match.group(1)] = limit | 
| 317 | 254 | 
| 318 for filename in files: | 255 for path in list(files): | 
| 319 if not filename.startswith('_locales/') or not filename.endswith('/messages.json'): | 256 match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path) |
| 257 if not match: | |
| 320 continue | 258 continue | 
| 321 | 259 | 
| 322 data = json.loads(files[filename]) | 260 # The Chrome Web Store requires messages used in manifest.json to | 
| 323 for name, info in defaults.iteritems(): | 261 # be present in all languages, and enforces length limits on | 
| 324 data.setdefault(name, info) | 262 # extension name and description. | 
| 325 for name, limit in limits.iteritems(): | 263 is_latam, is_mexican, filename = match.groups() | 
| 326 if name in data: | 264 if filename == 'messages.json': | 
| 327 data[name]['message'] = truncate(data[name]['message'], limit) | 265 data = json.loads(files[path]) | 
| 328 files[filename] = toJson(data) | 266 for name, info in defaults.iteritems(): | 
| 267 data.setdefault(name, info) | |
| 268 for name, limit in limits.iteritems(): | |
| 269 info = data.get(name) | |
| 270 if info: | |
| 271 info['message'] = truncate(info['message'], limit) | |
| 272 files[path] = toJson(data) | |
| 273 | |
| 274 # Chrome combines Latin American dialects of Spanish into es-419. | |
| 275 if is_latam: | |
| 276 data = files.pop(path) | |
| 277 if is_mexican: | |
| 278 files['_locales/es_419/' + filename] = data | |
| 329 | 279 | 
| 330 | 280 | 
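Besides padding missing messages and truncating over-long ones, the renamed fix_translations_for_chrome now also folds Latin American Spanish the way Chrome expects: es_AR and es_CL are dropped, and es_MX is renamed to es_419. How the path regex above classifies a few paths:

```python
import re

pattern = r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)'
for path in ['_locales/de/messages.json',
             '_locales/es_AR/messages.json',
             '_locales/es_MX/messages.json']:
    is_latam, is_mexican, filename = re.search(pattern, path).groups()
    print('%s -> %r %r %r' % (path, is_latam, is_mexican, filename))
# _locales/de/messages.json    -> None None 'messages.json'   (kept as is)
# _locales/es_AR/messages.json -> 'AR' None 'messages.json'   (popped and dropped)
# _locales/es_MX/messages.json -> 'MX' 'MX' 'messages.json'   (moved to _locales/es_419/)
```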
| 331 def signBinary(zipdata, keyFile): | 281 def signBinary(zipdata, keyFile): | 
| 332 from Crypto.Hash import SHA | 282 from Crypto.Hash import SHA | 
| 333 from Crypto.PublicKey import RSA | 283 from Crypto.PublicKey import RSA | 
| 334 from Crypto.Signature import PKCS1_v1_5 | 284 from Crypto.Signature import PKCS1_v1_5 | 
| 335 | 285 | 
| 336 try: | 286 try: | 
| 337 with open(keyFile, 'rb') as file: | 287 with open(keyFile, 'rb') as file: | 
| 338 key = RSA.importKey(file.read()) | 288 key = RSA.importKey(file.read()) | 
| (...skipping 59 matching lines...) | | 
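The body of signBinary falls into the elided lines, but the imports above show the mechanism: a PKCS#1 v1.5 signature over the SHA-1 digest of the zip data, using PyCrypto. A minimal sketch of that pattern (a stand-in, not the elided code itself):

```python
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5

def sign_zip(zipdata, key_file):
    # Load the RSA private key and sign the SHA-1 digest of the package.
    with open(key_file, 'rb') as handle:
        key = RSA.importKey(handle.read())
    return PKCS1_v1_5.new(key).sign(SHA.new(zipdata))
```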
| 398 files.preprocess( | 348 files.preprocess( | 
| 399 [f for f, _ in metadata.items('preprocess')], | 349 [f for f, _ in metadata.items('preprocess')], | 
| 400 {'needsExt': True} | 350 {'needsExt': True} | 
| 401 ) | 351 ) | 
| 402 | 352 | 
| 403 if metadata.has_section('import_locales'): | 353 if metadata.has_section('import_locales'): | 
| 404 import_locales(params, files) | 354 import_locales(params, files) | 
| 405 | 355 | 
| 406 files['manifest.json'] = createManifest(params, files) | 356 files['manifest.json'] = createManifest(params, files) | 
| 407 if type == 'chrome': | 357 if type == 'chrome': | 
| 408 fixTranslationsForCWS(files) | 358 fix_translations_for_chrome(files) | 
| 409 | 359 | 
| 410 if devenv: | 360 if devenv: | 
| 411 import buildtools | 361 import buildtools | 
| 412 import random | 362 import random | 
| 413 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js') | 363 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js') |
| 414 files['devenvVersion__'] = str(random.random()) | 364 files['devenvVersion__'] = str(random.random()) | 
| 415 | 365 | 
| 416 if metadata.has_option('general', 'testScripts'): | 366 if metadata.has_option('general', 'testScripts'): | 
| 417 files['qunit/index.html'] = createScriptPage( | 367 files['qunit/index.html'] = createScriptPage( | 
| 418 params, 'testIndex.html.tmpl', ('general', 'testScripts') | 368 params, 'testIndex.html.tmpl', ('general', 'testScripts') | 
| 419 ) | 369 ) | 
| 420 | 370 | 
| 421 zipdata = files.zipToString() | 371 zipdata = files.zipToString() | 
| 422 signature = None | 372 signature = None | 
| 423 pubkey = None | 373 pubkey = None | 
| 424 if keyFile != None: | 374 if keyFile != None: | 
| 425 signature = signBinary(zipdata, keyFile) | 375 signature = signBinary(zipdata, keyFile) | 
| 426 pubkey = getPublicKey(keyFile) | 376 pubkey = getPublicKey(keyFile) | 
| 427 writePackage(outFile, pubkey, signature, zipdata) | 377 writePackage(outFile, pubkey, signature, zipdata) | 
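writePackage, also outside the visible part of the diff, is what finally writes the package: a plain zip when no key is given, a signed CRX otherwise. For orientation, a sketch of the standard CRX2 container layout that the struct import at the top of the file is typically used for; this describes the documented format, not necessarily the elided function body:

```python
import struct

def write_crx(out_file, pubkey, signature, zipdata):
    # CRX2 layout: magic "Cr24", format version 2, lengths of the DER
    # public key and the signature (little-endian uint32 each), then the
    # key, the signature and the zip archive itself.
    with open(out_file, 'wb') as handle:
        handle.write('Cr24')
        handle.write(struct.pack('<III', 2, len(pubkey), len(signature)))
        handle.write(pubkey + signature + zipdata)
```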