Rietveld Code Review Tool

Delta Between Two Patch Sets: packagerChrome.py

Issue 29501558: Issue 5383 - Add tests for the Chrome and Firefox packagers (Closed)
Left Patch Set: Addressing comments Created Sept. 20, 2017, 8:47 a.m.
Right Patch Set: Addressing Vasily's comments Created Oct. 22, 2017, 11:11 a.m.
1 # This Source Code Form is subject to the terms of the Mozilla Public 1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this 2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
4 4
5 import errno 5 import errno
6 import glob
6 import io 7 import io
7 import json 8 import json
8 import os 9 import os
9 import re 10 import re
10 from StringIO import StringIO
11 import struct 11 import struct
12 import subprocess
12 import sys 13 import sys
13 import collections 14 import random
14 15
15 from packager import (readMetadata, getDefaultFileName, getBuildVersion, 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion,
16 getTemplate, Files) 17 getTemplate, Files)
17 18
18 defaultLocale = 'en_US' 19 defaultLocale = 'en_US'
19 20
20 21
21 def getIgnoredFiles(params): 22 def getIgnoredFiles(params):
22 return {'store.description'} 23 return {'store.description'}
23 24
(...skipping 14 matching lines...)
38 39
39 def processFile(path, data, params): 40 def processFile(path, data, params):
40 # We don't change anything yet, this function currently only exists here so 41 # We don't change anything yet, this function currently only exists here so
41 # that it can be overridden if necessary. 42 # that it can be overridden if necessary.
42 return data 43 return data
43 44
44 45
45 def makeIcons(files, filenames): 46 def makeIcons(files, filenames):
46 icons = {} 47 icons = {}
47 for filename in filenames: 48 for filename in filenames:
48 magic, width, height = struct.unpack('>8s8xii', files[filename][:24]) 49 try:
Sebastian Noack 2017/09/20 21:36:10 This is different from my suggestion: 1. If you u
tlucas 2017/09/21 11:34:55 Sorry, i somehow overlooked this part from your co
50 magic, width, height = struct.unpack_from('>8s8xii',
51 files[filename])
52 except struct.error:
53 magic = None
49 if magic != '\x89PNG\r\n\x1a\n': 54 if magic != '\x89PNG\r\n\x1a\n':
50 raise TypeError('{} is no valid PNG.'.format(filename)) 55 raise Exception(filename + ' is no valid PNG.')
Sebastian Noack 2017/09/20 21:36:10 Nit; from https://adblockplus.org/coding-style#pyt
tlucas 2017/09/21 11:34:55 Done.
51 if(width != height): 56 if(width != height):
52 print >>sys.stderr, 'Warning: %s size is %ix%i, icon should be square' % (filename, width, height) 57 print >>sys.stderr, 'Warning: %s size is %ix%i, icon should be square' % (filename, width, height)
53 icons[width] = filename 58 icons[width] = filename
54 return icons 59 return icons
55 60
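The exchange above concerns how the icon header is read. As a rough, self-contained sketch (Python 2, matching the module) of what the right-hand version does: struct.unpack_from reads the 8-byte PNG signature plus the IHDR width and height straight from the file buffer, and a struct.error raised for a too-short buffer is treated the same as a wrong signature. The helper name png_dimensions is hypothetical.

    import struct

    def png_dimensions(data):
        # 8-byte PNG signature, 8 bytes of IHDR length/type skipped,
        # then big-endian width and height.
        try:
            magic, width, height = struct.unpack_from('>8s8xii', data)
        except struct.error:
            magic = None
        if magic != '\x89PNG\r\n\x1a\n':
            raise Exception('not a valid PNG')
        return width, height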
56 61
57 def createScriptPage(params, template_name, script_option): 62 def createScriptPage(params, template_name, script_option):
58 template = getTemplate(template_name, autoEscape=True) 63 template = getTemplate(template_name, autoEscape=True)
59 return template.render( 64 return template.render(
60 basename=params['metadata'].get('general', 'basename'), 65 basename=params['metadata'].get('general', 'basename'),
(...skipping 69 matching lines...)
130 # Normalize JSON structure 135 # Normalize JSON structure
131 licenseComment = re.compile(r'/\*.*?\*/', re.S) 136 licenseComment = re.compile(r'/\*.*?\*/', re.S)
132 data = json.loads(re.sub(licenseComment, '', manifest, 1)) 137 data = json.loads(re.sub(licenseComment, '', manifest, 1))
133 if '_dummy' in data: 138 if '_dummy' in data:
134 del data['_dummy'] 139 del data['_dummy']
135 manifest = json.dumps(data, sort_keys=True, indent=2) 140 manifest = json.dumps(data, sort_keys=True, indent=2)
136 141
137 return manifest.encode('utf-8') 142 return manifest.encode('utf-8')
138 143
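As an illustration of the manifest normalization step above (the template content here is invented, not taken from the repository): the leading /* ... */ license comment is stripped once, the _dummy key is dropped, and the result is re-serialized with sorted keys.

    import json
    import re

    manifest = '/* MPL 2.0 header */\n{"_dummy": true, "name": "__MSG_name__"}'
    licenseComment = re.compile(r'/\*.*?\*/', re.S)
    data = json.loads(re.sub(licenseComment, '', manifest, 1))
    data.pop('_dummy', None)
    manifest = json.dumps(data, sort_keys=True, indent=2)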
139 144
140 def convertJS(params, files):
141 output_files = collections.OrderedDict()
142 args = {}
143
144 for item in params['metadata'].items('convert_js'):
145 name, value = item
146 filename, arg = re.search(r'^(.*?)(?:\[(.*)\])?$', name).groups()
147 if arg is None:
148 output_files[filename] = (value.split(), item.source)
149 else:
150 args.setdefault(filename, {})[arg] = value
151
152 template = getTemplate('modules.js.tmpl')
153
154 for filename, (input_files, origin) in output_files.iteritems():
155 if '/' in filename and not files.isIncluded(filename):
156 continue
157
158 current_args = args.get(filename, {})
159 current_args['autoload'] = [module for module in
160 current_args.get('autoload', '').split(',')
161 if module != '']
162
163 base_dir = os.path.dirname(origin)
164 modules = []
165
166 for input_filename in input_files:
167 module_name = os.path.splitext(os.path.basename(input_filename))[0]
168 prefix = os.path.basename(os.path.dirname(input_filename))
169 if prefix != 'lib':
170 module_name = '{}_{}'.format(prefix, module_name)
171 with open(os.path.join(base_dir, input_filename), 'r') as file:
172 modules.append((module_name, file.read().decode('utf-8')))
173 files.pop(input_filename, None)
174
175 files[filename] = template.render(
176 args=current_args,
177 basename=params['metadata'].get('general', 'basename'),
178 modules=modules,
179 type=params['type'],
180 version=params['metadata'].get('general', 'version')
181 ).encode('utf-8')
182
183
184 def toJson(data): 145 def toJson(data):
185 return json.dumps( 146 return json.dumps(
186 data, ensure_ascii=False, sort_keys=True, 147 data, ensure_ascii=False, sort_keys=True,
187 indent=2, separators=(',', ': ') 148 indent=2, separators=(',', ': ')
188 ).encode('utf-8') + '\n' 149 ).encode('utf-8') + '\n'
189 150
190 151
191 def import_string_webext(data, key, source): 152 def create_bundles(params, files):
192 """Import a single translation from the source dictionary into data""" 153 base_extension_path = params['baseDir']
193 data[key] = source 154 info_templates = {
194 155 'chrome': 'chromeInfo.js.tmpl',
195 156 'edge': 'edgeInfo.js.tmpl',
196 def import_string_gecko(data, key, value): 157 'gecko': 'geckoInfo.js.tmpl'
197 """Import Gecko-style locales into data. 158 }
198 159
199 Only sets {'message': value} in the data-dictionary, after stripping 160 # Historically we didn't use relative paths when requiring modules, so in
200 undesired Gecko-style access keys. 161 # order for webpack to know where to find them we need to pass in a list of
201 """ 162 # resolve paths. Going forward we should always use relative paths, once we
202 match = re.search(r'^(.*?)\s*\(&.\)$', value) 163 # do that consistently this can be removed. See issues 5760, 5761 and 5762.
203 if match: 164 resolve_paths = [os.path.join(base_extension_path, dir, 'lib')
204 value = match.group(1) 165 for dir in ['', 'adblockpluscore', 'adblockplusui']]
205 else: 166
206 index = value.find('&') 167 info_template = getTemplate(info_templates[params['type']])
207 if index >= 0: 168 info_module = info_template.render(
208 value = value[0:index] + value[index + 1:] 169 basename=params['metadata'].get('general', 'basename'),
209 170 version=params['metadata'].get('general', 'version')
210 data[key] = {'message': value} 171 ).encode('utf-8')
172
173 configuration = {
174 'bundles': [],
175 'extension_path': base_extension_path,
176 'info_module': info_module,
177 'resolve_paths': resolve_paths,
178 }
179
180 for item in params['metadata'].items('bundles'):
181 name, value = item
182 base_item_path = os.path.dirname(item.source)
183
184 bundle_file = os.path.relpath(os.path.join(base_item_path, name),
185 base_extension_path)
186 entry_files = [os.path.join(base_item_path, module_path)
187 for module_path in value.split()]
188 configuration['bundles'].append({
189 'bundle_name': bundle_file,
190 'entry_points': entry_files,
191 })
192
193 cmd = ['node', os.path.join(os.path.dirname(__file__), 'webpack_runner.js')]
194 process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
195 stdin=subprocess.PIPE)
196 output = process.communicate(input=toJson(configuration))[0]
197 if process.returncode != 0:
198 raise subprocess.CalledProcessError(process.returncode, cmd=cmd)
199 output = json.loads(output)
200
201 # Clear the mapping for any files included in a bundle, to avoid them being
202 # duplicated in the build.
203 for to_ignore in output['included']:
204 files.pop(to_ignore, None)
205
206 for bundle in output['files']:
207 files[bundle] = output['files'][bundle].encode('utf-8')
211 208
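For readers unfamiliar with the webpack step, the configuration that create_bundles() pipes into webpack_runner.js is a JSON document shaped roughly as below (all paths are invented for illustration); based on the code above, the script is expected to answer with a JSON object carrying 'files' (bundle name to content) and 'included' (source files to drop from the package).

    example_configuration = {
        'extension_path': '/src/adblockpluschrome',
        'info_module': '/* rendered chromeInfo.js.tmpl */',
        'resolve_paths': [
            '/src/adblockpluschrome/lib',
            '/src/adblockpluschrome/adblockpluscore/lib',
            '/src/adblockpluschrome/adblockplusui/lib',
        ],
        'bundles': [
            {'bundle_name': 'lib/adblockplus.js',
             'entry_points': ['/src/adblockpluschrome/lib/adblockplus.js']},
        ],
    }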
212 209
213 def import_locales(params, files): 210 def import_locales(params, files):
214 import localeTools 211 for item in params['metadata'].items('import_locales'):
215 212 filename, keys = item
216 # FIXME: localeTools doesn't use real Chrome locales, it uses dash as 213 for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source),
217 # separator instead. 214 *filename.split('/'))):
218 convert_locale_code = lambda code: code.replace('-', '_') 215 locale = sourceFile.split(os.path.sep)[-2]
219 216 targetFile = os.path.join('_locales', locale, 'messages.json')
220 # We need to map Chrome locales to Gecko locales. Start by mapping Chrome 217 data = json.loads(files.get(targetFile, '{}').decode('utf-8'))
221 # locales to themselves, merely with the dash as separator.
222 locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales}
223
224 # Convert values to Crowdin locales first (use Chrome => Crowdin mapping).
225 for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems():
226 locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale
227
228 # Now convert values to Gecko locales (use Gecko => Crowdin mapping).
229 reverse_mapping = {v: k for k, v in locale_mapping.iteritems()}
230 for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems():
231 if crowdin_locale in reverse_mapping:
232 locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale
233
234 for target, source in locale_mapping.iteritems():
235 targetFile = '_locales/%s/messages.json' % target
236 if not targetFile in files:
237 continue
238
239 for item in params['metadata'].items('import_locales'):
240 fileName, keys = item
241 parts = map(lambda n: source if n == '*' else n, fileName.split('/'))
242 sourceFile = os.path.join(os.path.dirname(item.source), *parts)
243 incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete')
244 if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker):
245 continue
246
247 data = json.loads(files[targetFile].decode('utf-8'))
248 218
249 try: 219 try:
250 # The WebExtensions (.json) and Gecko format provide 220 with io.open(sourceFile, 'r', encoding='utf-8') as handle:
251 # translations differently and/or provide additional 221 sourceData = json.load(handle)
252 # information like e.g. "placeholders". We want to adhere to
253 # that and preserve the additional info.
254 if sourceFile.endswith('.json'):
255 with io.open(sourceFile, 'r', encoding='utf-8') as handle:
256 sourceData = json.load(handle)
257 import_string = import_string_webext
258 else:
259 sourceData = localeTools.readFile(sourceFile)
260 import_string = import_string_gecko
261 222
262 # Resolve wildcard imports 223 # Resolve wildcard imports
263 if keys == '*' or keys == '=*': 224 if keys == '*':
264 importList = sourceData.keys() 225 importList = sourceData.keys()
265 importList = filter(lambda k: not k.startswith('_'), importList) 226 importList = filter(lambda k: not k.startswith('_'), importList)
266 if keys == '=*':
267 importList = map(lambda k: '=' + k, importList)
268 keys = ' '.join(importList) 227 keys = ' '.join(importList)
269 228
270 for stringID in keys.split(): 229 for stringID in keys.split():
271 noMangling = False
272 if stringID.startswith('='):
273 stringID = stringID[1:]
274 noMangling = True
275
276 if stringID in sourceData: 230 if stringID in sourceData:
277 if noMangling: 231 if stringID in data:
278 key = re.sub(r'\W', '_', stringID) 232 print ('Warning: locale string {} defined multiple'
279 else: 233 ' times').format(stringID)
280 key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID) 234
281 if key in data: 235 data[stringID] = sourceData[stringID]
282 print 'Warning: locale string %s defined multiple times' % key
283
284 import_string(data, key, sourceData[stringID])
285 except Exception as e: 236 except Exception as e:
286 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e) 237 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e)
287 238
288 files[targetFile] = toJson(data) 239 files[targetFile] = toJson(data)
289 240
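A small sketch of the path handling in the rewritten import_locales() (file names are made up): each pattern from the metadata's import_locales section is globbed relative to the metadata file, and the locale is taken from the directory one level above the matched file, exactly as the split on os.path.sep above does.

    import os

    # Hypothetical match produced by the glob over a metadata pattern.
    source_file = os.path.join('adblockplusui', 'locale', 'de', 'strings.json')
    locale = source_file.split(os.path.sep)[-2]                  # 'de'
    target_file = os.path.join('_locales', locale, 'messages.json')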
290 241
291 def truncate(text, length_limit): 242 def truncate(text, length_limit):
292 if len(text) <= length_limit: 243 if len(text) <= length_limit:
293 return text 244 return text
294 return text[:length_limit - 1].rstrip() + u'\u2026' 245 return text[:length_limit - 1].rstrip() + u'\u2026'
295 246
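Usage illustration for truncate(), assuming the function defined above and the Chrome Web Store's 45-character name limit; the strings are made up.

    name = u'An Extremely Verbose Example Extension Name Indeed'
    assert len(truncate(name, 45)) <= 45                  # trimmed, ends with an ellipsis
    assert truncate(u'Short name', 45) == u'Short name'   # short strings pass through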
296 247
297 def fixTranslationsForCWS(files): 248 def fix_translations_for_chrome(files):
298 # Chrome Web Store requires messages used in manifest.json to be present in
299 # all languages. It also enforces length limits for extension names and
300 # descriptions.
301 defaults = {} 249 defaults = {}
302 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) 250 data = json.loads(files['_locales/%s/messages.json' % defaultLocale])
303 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): 251 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']):
304 name = match.group(1) 252 name = match.group(1)
305 defaults[name] = data[name] 253 defaults[name] = data[name]
306 254
307 limits = {} 255 limits = {}
308 manifest = json.loads(files['manifest.json']) 256 manifest = json.loads(files['manifest.json'])
309 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): 257 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)):
310 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) 258 match = re.search(r'__MSG_(\S+)__', manifest.get(key, ''))
311 if match: 259 if match:
312 limits[match.group(1)] = limit 260 limits[match.group(1)] = limit
313 261
314 for filename in files: 262 for path in list(files):
315 if not filename.startswith('_locales/') or not filename.endswith('/messages.json'): 263 match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path)
264 if not match:
316 continue 265 continue
317 266
318 data = json.loads(files[filename]) 267 # The Chrome Web Store requires messages used in manifest.json to
319 for name, info in defaults.iteritems(): 268 # be present in all languages, and enforces length limits on
320 data.setdefault(name, info) 269 # extension name and description.
321 for name, limit in limits.iteritems(): 270 is_latam, is_mexican, filename = match.groups()
322 if name in data: 271 if filename == 'messages.json':
323 data[name]['message'] = truncate(data[name]['message'], limit) 272 data = json.loads(files[path])
324 files[filename] = toJson(data) 273 for name, info in defaults.iteritems():
274 data.setdefault(name, info)
275 for name, limit in limits.iteritems():
276 info = data.get(name)
277 if info:
278 info['message'] = truncate(info['message'], limit)
279 files[path] = toJson(data)
280
281 # Chrome combines Latin American dialects of Spanish into es-419.
282 if is_latam:
283 data = files.pop(path)
284 if is_mexican:
285 files['_locales/es_419/' + filename] = data
325 286
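To make the es-419 handling above concrete, a minimal illustration with a plain dict standing in for the Files object (contents invented): Mexican Spanish is carried over to es_419, while the other Latin-American dialects are dropped.

    files = {
        '_locales/es_MX/messages.json': '{"name": {"message": "Ejemplo"}}',
        '_locales/es_AR/messages.json': '{}',
    }
    files['_locales/es_419/messages.json'] = files.pop(
        '_locales/es_MX/messages.json')
    files.pop('_locales/es_AR/messages.json')
    assert sorted(files) == ['_locales/es_419/messages.json']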
326 287
327 def signBinary(zipdata, keyFile): 288 def signBinary(zipdata, keyFile):
328 from Crypto.Hash import SHA 289 from Crypto.Hash import SHA
329 from Crypto.PublicKey import RSA 290 from Crypto.PublicKey import RSA
330 from Crypto.Signature import PKCS1_v1_5 291 from Crypto.Signature import PKCS1_v1_5
331 292
332 try: 293 try:
333 with open(keyFile, 'rb') as file: 294 with open(keyFile, 'rb') as file:
334 key = RSA.importKey(file.read()) 295 key = RSA.importKey(file.read())
(...skipping 18 matching lines...)
353 file = open(outputFile, 'wb') 314 file = open(outputFile, 'wb')
354 else: 315 else:
355 file = outputFile 316 file = outputFile
356 if pubkey != None and signature != None: 317 if pubkey != None and signature != None:
357 file.write(struct.pack('<4sIII', 'Cr24', 2, len(pubkey), len(signature))) 318 file.write(struct.pack('<4sIII', 'Cr24', 2, len(pubkey), len(signature)))
358 file.write(pubkey) 319 file.write(pubkey)
359 file.write(signature) 320 file.write(signature)
360 file.write(zipdata) 321 file.write(zipdata)
361 322
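For reference, a sketch of how a consumer could split apart the CRX2 container written above: a 16-byte little-endian header ('Cr24', format version 2, public-key length, signature length), followed by the key, the signature and the ZIP data. read_crx2_header is a hypothetical helper, written in the module's Python 2 style.

    import struct

    def read_crx2_header(blob):
        magic, version, key_len, sig_len = struct.unpack('<4sIII', blob[:16])
        assert magic == 'Cr24' and version == 2
        pubkey = blob[16:16 + key_len]
        signature = blob[16 + key_len:16 + key_len + sig_len]
        zipdata = blob[16 + key_len + sig_len:]
        return pubkey, signature, zipdata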
362 323
324 def add_devenv_requirements(files, metadata, params):
325 files.read(
326 os.path.join(os.path.dirname(__file__), 'chromeDevenvPoller__.js'),
327 relpath='devenvPoller__.js',
328 )
329 files['devenvVersion__'] = str(random.random())
330
331 if metadata.has_option('general', 'testScripts'):
332 files['qunit/index.html'] = createScriptPage(
333 params, 'testIndex.html.tmpl', ('general', 'testScripts')
334 )
335
336
363 def createBuild(baseDir, type='chrome', outFile=None, buildNum=None, releaseBuild=False, keyFile=None, devenv=False): 337 def createBuild(baseDir, type='chrome', outFile=None, buildNum=None, releaseBuild=False, keyFile=None, devenv=False):
364 metadata = readMetadata(baseDir, type) 338 metadata = readMetadata(baseDir, type)
365 version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum) 339 version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)
366 340
367 if outFile == None: 341 if outFile == None:
368 if type == 'gecko-webext': 342 if type == 'gecko':
369 file_extension = 'xpi' 343 file_extension = 'xpi'
370 else: 344 else:
371 file_extension = 'crx' if keyFile else 'zip' 345 file_extension = 'crx' if keyFile else 'zip'
372 outFile = getDefaultFileName(metadata, version, file_extension) 346 outFile = getDefaultFileName(metadata, version, file_extension)
373 347
374 params = { 348 params = {
375 'type': type, 349 'type': type,
376 'baseDir': baseDir, 350 'baseDir': baseDir,
377 'releaseBuild': releaseBuild, 351 'releaseBuild': releaseBuild,
378 'version': version, 352 'version': version,
379 'devenv': devenv, 353 'devenv': devenv,
380 'metadata': metadata, 354 'metadata': metadata,
381 } 355 }
382 356
383 mapped = metadata.items('mapping') if metadata.has_section('mapping') else [] 357 mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
384 files = Files(getPackageFiles(params), getIgnoredFiles(params), 358 files = Files(getPackageFiles(params), getIgnoredFiles(params),
385 process=lambda path, data: processFile(path, data, params)) 359 process=lambda path, data: processFile(path, data, params))
386 360
387 files.readMappedFiles(mapped) 361 files.readMappedFiles(mapped)
388 files.read(baseDir, skip=[opt for opt, _ in mapped]) 362 files.read(baseDir, skip=[opt for opt, _ in mapped])
389 363
390 if metadata.has_section('convert_js'): 364 if metadata.has_section('bundles'):
391 convertJS(params, files) 365 create_bundles(params, files)
392 366
393 if metadata.has_section('preprocess'): 367 if metadata.has_section('preprocess'):
394 files.preprocess( 368 files.preprocess(
395 [f for f, _ in metadata.items('preprocess')], 369 [f for f, _ in metadata.items('preprocess')],
396 {'needsExt': True} 370 {'needsExt': True}
397 ) 371 )
398 372
399 if metadata.has_section('import_locales'): 373 if metadata.has_section('import_locales'):
400 import_locales(params, files) 374 import_locales(params, files)
401 375
402 files['manifest.json'] = createManifest(params, files) 376 files['manifest.json'] = createManifest(params, files)
403 if type == 'chrome': 377 if type == 'chrome':
404 fixTranslationsForCWS(files) 378 fix_translations_for_chrome(files)
405 379
406 if devenv: 380 if devenv:
407 import buildtools 381 add_devenv_requirements(files, metadata, params)
408 import random
409 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js')
410 files['devenvVersion__'] = str(random.random())
411
412 if metadata.has_option('general', 'testScripts'):
413 files['qunit/index.html'] = createScriptPage(
414 params, 'testIndex.html.tmpl', ('general', 'testScripts')
415 )
416 382
417 zipdata = files.zipToString() 383 zipdata = files.zipToString()
418 signature = None 384 signature = None
419 pubkey = None 385 pubkey = None
420 if keyFile != None: 386 if keyFile != None:
421 signature = signBinary(zipdata, keyFile) 387 signature = signBinary(zipdata, keyFile)
422 pubkey = getPublicKey(keyFile) 388 pubkey = getPublicKey(keyFile)
423 writePackage(outFile, pubkey, signature, zipdata) 389 writePackage(outFile, pubkey, signature, zipdata)
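A hypothetical call showing how createBuild() ties the steps together, producing an unsigned zip for the chrome target (the paths and file name are made up, and the function is assumed to be the one defined above):

    createBuild('/src/adblockpluschrome', type='chrome',
                outFile='adblockpluschrome.zip', releaseBuild=False)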