Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Delta Between Two Patch Sets: packagerChrome.py

Issue 29501558: Issue 5383 - Add tests for the Chrome and Firefox packagers (Closed)
Left Patch Set: Asserting PNGs Created Sept. 19, 2017, 10:02 a.m.
Right Patch Set: Addressing Vasily's comments Created Oct. 22, 2017, 11:11 a.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
Left: Side by side diff | Download
Right: Side by side diff | Download
« no previous file with change/comment | « package.json ('k') | tests/README.md » ('j') | no next file with change/comment »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
LEFTRIGHT
1 # This Source Code Form is subject to the terms of the Mozilla Public 1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this 2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
4 4
5 import errno 5 import errno
6 import glob
6 import io 7 import io
7 import json 8 import json
8 import os 9 import os
9 import re 10 import re
10 from StringIO import StringIO
11 import struct 11 import struct
12 import subprocess
12 import sys 13 import sys
13 import collections 14 import random
14 15
15 from packager import (readMetadata, getDefaultFileName, getBuildVersion, 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion,
16 getTemplate, Files) 17 getTemplate, Files)
17 18
18 defaultLocale = 'en_US' 19 defaultLocale = 'en_US'
19 20
20 21
def getIgnoredFiles(params):
    """Return the set of file names that must never be packaged."""
    ignored = set()
    ignored.add('store.description')
    return ignored
23 24
(...skipping 14 matching lines...) Expand all
38 39
def processFile(path, data, params):
    """Per-file content hook; currently an identity transform.

    We don't change anything yet — this function only exists so that it
    can be overridden where necessary. Returns *data* unchanged.
    """
    return data
43 44
44 45
def makeIcons(files, filenames):
    """Map icon pixel widths to file names, validating each is a PNG.

    Unpacks the 8-byte PNG signature plus the IHDR width/height straight
    from the raw bytes in *files*. Raises for anything that is not a
    PNG; warns on stderr when an icon is not square.
    """
    icons = {}
    for filename in filenames:
        blob = files[filename]
        try:
            # '>8s8xii': signature, skip length+chunk type, width, height.
            magic, width, height = struct.unpack_from('>8s8xii', blob)
        except struct.error:
            # Too short to even hold a header — treat as not a PNG.
            magic = None
        if magic != '\x89PNG\r\n\x1a\n':
            raise Exception(filename + ' is no valid PNG.')
        if width != height:
            sys.stderr.write(
                'Warning: %s size is %ix%i, icon should be square\n'
                % (filename, width, height))
        icons[width] = filename
    return icons
57 60
58 61
59 def createScriptPage(params, template_name, script_option): 62 def createScriptPage(params, template_name, script_option):
60 template = getTemplate(template_name, autoEscape=True) 63 template = getTemplate(template_name, autoEscape=True)
61 return template.render( 64 return template.render(
62 basename=params['metadata'].get('general', 'basename'), 65 basename=params['metadata'].get('general', 'basename'),
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
132 # Normalize JSON structure 135 # Normalize JSON structure
133 licenseComment = re.compile(r'/\*.*?\*/', re.S) 136 licenseComment = re.compile(r'/\*.*?\*/', re.S)
134 data = json.loads(re.sub(licenseComment, '', manifest, 1)) 137 data = json.loads(re.sub(licenseComment, '', manifest, 1))
135 if '_dummy' in data: 138 if '_dummy' in data:
136 del data['_dummy'] 139 del data['_dummy']
137 manifest = json.dumps(data, sort_keys=True, indent=2) 140 manifest = json.dumps(data, sort_keys=True, indent=2)
138 141
139 return manifest.encode('utf-8') 142 return manifest.encode('utf-8')
140 143
141 144
def convertJS(params, files):
    """Compile the module lists from the metadata's [convert_js] section
    into single output files via the modules.js.tmpl template.

    Plain metadata keys map an output file name to its input modules;
    keys of the form "name[arg]" collect extra template arguments for
    that output. Inputs consumed by an output are removed from *files*.
    """
    output_files = collections.OrderedDict()
    args = {}

    for item in params['metadata'].items('convert_js'):
        name, value = item
        filename, arg = re.search(r'^(.*?)(?:\[(.*)\])?$', name).groups()
        if arg is None:
            # Plain key: output file plus its space-separated inputs.
            output_files[filename] = (value.split(), item.source)
        else:
            # "name[arg]" key: per-output template argument.
            args.setdefault(filename, {})[arg] = value

    template = getTemplate('modules.js.tmpl')

    for filename, (input_files, origin) in output_files.iteritems():
        if '/' in filename and not files.isIncluded(filename):
            continue

        current_args = args.get(filename, {})
        # Normalize the comma-separated autoload list, dropping empties.
        current_args['autoload'] = [module for module in
                                    current_args.get('autoload', '').split(',')
                                    if module != '']

        base_dir = os.path.dirname(origin)
        modules = []

        for input_filename in input_files:
            module_name = os.path.splitext(os.path.basename(input_filename))[0]
            # Modules outside lib/ get their directory name as a prefix.
            prefix = os.path.basename(os.path.dirname(input_filename))
            if prefix != 'lib':
                module_name = '{}_{}'.format(prefix, module_name)
            with open(os.path.join(base_dir, input_filename), 'r') as fp:
                modules.append((module_name, fp.read().decode('utf-8')))
            files.pop(input_filename, None)

        files[filename] = template.render(
            args=current_args,
            basename=params['metadata'].get('general', 'basename'),
            modules=modules,
            type=params['type'],
            version=params['metadata'].get('general', 'version')
        ).encode('utf-8')
184
185
def toJson(data):
    """Serialize *data* as UTF-8 encoded, pretty-printed JSON bytes.

    Keys are sorted for reproducible output; a trailing newline is
    appended so generated files end with one.
    """
    text = json.dumps(
        data, ensure_ascii=False, sort_keys=True,
        indent=2, separators=(',', ': ')
    )
    # Bytes literal for the newline: identical under Python 2, and avoids
    # the implicit bytes/str concatenation that breaks under Python 3.
    return text.encode('utf-8') + b'\n'
191 150
192 151
def create_bundles(params, files):
    """Build the JS bundles from the metadata's [bundles] section.

    Renders the type-specific info module, hands a JSON configuration to
    the Node-based webpack_runner.js over stdin, then stores the bundles
    it returns in *files* and drops any sources that got bundled.
    """
    base_extension_path = params['baseDir']
    info_templates = {
        'chrome': 'chromeInfo.js.tmpl',
        'edge': 'edgeInfo.js.tmpl',
        'gecko': 'geckoInfo.js.tmpl'
    }

    # Historically we didn't use relative paths when requiring modules, so
    # in order for webpack to know where to find them we need to pass in a
    # list of resolve paths. Going forward we should always use relative
    # paths, once we do that consistently this can be removed. See issues
    # 5760, 5761 and 5762.
    resolve_paths = [os.path.join(base_extension_path, subdir, 'lib')
                     for subdir in ['', 'adblockpluscore', 'adblockplusui']]

    info_template = getTemplate(info_templates[params['type']])
    info_module = info_template.render(
        basename=params['metadata'].get('general', 'basename'),
        version=params['metadata'].get('general', 'version')
    ).encode('utf-8')

    configuration = {
        'bundles': [],
        'extension_path': base_extension_path,
        'info_module': info_module,
        'resolve_paths': resolve_paths,
    }

    for item in params['metadata'].items('bundles'):
        name, value = item
        base_item_path = os.path.dirname(item.source)

        # Bundle names are stored relative to the extension root.
        bundle_file = os.path.relpath(os.path.join(base_item_path, name),
                                      base_extension_path)
        entry_files = [os.path.join(base_item_path, module_path)
                       for module_path in value.split()]
        configuration['bundles'].append({
            'bundle_name': bundle_file,
            'entry_points': entry_files,
        })

    cmd = ['node',
           os.path.join(os.path.dirname(__file__), 'webpack_runner.js')]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                               stdin=subprocess.PIPE)
    output = process.communicate(input=toJson(configuration))[0]
    if process.returncode != 0:
        raise subprocess.CalledProcessError(process.returncode, cmd=cmd)
    output = json.loads(output)

    # Clear the mapping for any files included in a bundle, to avoid them
    # being duplicated in the build.
    for to_ignore in output['included']:
        files.pop(to_ignore, None)

    for bundle in output['files']:
        files[bundle] = output['files'][bundle].encode('utf-8')
213 208
214 209
def import_locales(params, files):
    """Merge translations into the _locales/<locale>/messages.json files.

    Each [import_locales] metadata entry maps a glob-style source path
    (with the locale as a path component) to a space-separated list of
    string IDs — or '*' to import every string whose name does not start
    with an underscore. Import errors are reported as warnings and skip
    only the affected source file.
    """
    for item in params['metadata'].items('import_locales'):
        filename, keys = item
        pattern = os.path.join(os.path.dirname(item.source),
                               *filename.split('/'))
        for sourceFile in glob.glob(pattern):
            # The locale is the second-to-last path component, i.e.
            # .../<locale>/<file>.
            locale = sourceFile.split(os.path.sep)[-2]
            targetFile = os.path.join('_locales', locale, 'messages.json')
            data = json.loads(files.get(targetFile, '{}').decode('utf-8'))

            try:
                with io.open(sourceFile, 'r', encoding='utf-8') as handle:
                    sourceData = json.load(handle)

                # Resolve wildcard imports
                if keys == '*':
                    keys = ' '.join(k for k in sourceData.keys()
                                    if not k.startswith('_'))

                for stringID in keys.split():
                    if stringID in sourceData:
                        if stringID in data:
                            sys.stdout.write(
                                'Warning: locale string {} defined multiple'
                                ' times\n'.format(stringID))
                        data[stringID] = sourceData[stringID]
            except Exception as e:
                sys.stdout.write(
                    'Warning: error importing locale data from %s: %s\n'
                    % (sourceFile, e))

            files[targetFile] = toJson(data)
291 240
292 241
def truncate(text, length_limit):
    """Shorten *text* to at most *length_limit* characters.

    Over-long text is cut one character short, right-stripped, and ended
    with a Unicode ellipsis; anything within the limit is returned as-is.
    """
    if len(text) > length_limit:
        return text[:length_limit - 1].rstrip() + u'\u2026'
    return text
297 246
298 247
def fix_translations_for_chrome(files):
    """Post-process the _locales tree for the Chrome Web Store.

    The store requires every message referenced from manifest.json to
    exist in all languages, and enforces length limits on the extension
    name and description. Latin American Spanish dialects are folded
    into Chrome's combined es_419 locale (keeping the Mexican variant).
    """
    # Default-locale values for every message the manifest references.
    defaults = {}
    data = json.loads(files['_locales/%s/messages.json' % defaultLocale])
    for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']):
        name = match.group(1)
        defaults[name] = data[name]

    # Store-imposed length limits for manifest-referenced messages.
    limits = {}
    manifest = json.loads(files['manifest.json'])
    for key, limit in (('name', 45), ('description', 132),
                       ('short_name', 12)):
        match = re.search(r'__MSG_(\S+)__', manifest.get(key, ''))
        if match:
            limits[match.group(1)] = limit

    # Iterate over a snapshot since we pop/add entries while looping.
    for path in list(files):
        match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path)
        if not match:
            continue

        is_latam, is_mexican, filename = match.groups()
        if filename == 'messages.json':
            data = json.loads(files[path])
            for name, info in defaults.items():
                data.setdefault(name, info)
            for name, limit in limits.items():
                info = data.get(name)
                if info:
                    info['message'] = truncate(info['message'], limit)
            files[path] = toJson(data)

        # Chrome combines Latin American dialects of Spanish into es-419.
        if is_latam:
            data = files.pop(path)
            if is_mexican:
                files['_locales/es_419/' + filename] = data
327 286
328 287
329 def signBinary(zipdata, keyFile): 288 def signBinary(zipdata, keyFile):
330 from Crypto.Hash import SHA 289 from Crypto.Hash import SHA
331 from Crypto.PublicKey import RSA 290 from Crypto.PublicKey import RSA
332 from Crypto.Signature import PKCS1_v1_5 291 from Crypto.Signature import PKCS1_v1_5
333 292
334 try: 293 try:
335 with open(keyFile, 'rb') as file: 294 with open(keyFile, 'rb') as file:
336 key = RSA.importKey(file.read()) 295 key = RSA.importKey(file.read())
(...skipping 18 matching lines...) Expand all
355 file = open(outputFile, 'wb') 314 file = open(outputFile, 'wb')
356 else: 315 else:
357 file = outputFile 316 file = outputFile
358 if pubkey != None and signature != None: 317 if pubkey != None and signature != None:
359 file.write(struct.pack('<4sIII', 'Cr24', 2, len(pubkey), len(signature)) ) 318 file.write(struct.pack('<4sIII', 'Cr24', 2, len(pubkey), len(signature)) )
360 file.write(pubkey) 319 file.write(pubkey)
361 file.write(signature) 320 file.write(signature)
362 file.write(zipdata) 321 file.write(zipdata)
363 322
364 323
def add_devenv_requirements(files, metadata, params):
    """Add development-environment helpers to the build.

    Bundles the auto-reload poller script, a random version stamp the
    poller watches for changes, and — when 'testScripts' is configured —
    the QUnit test index page.
    """
    poller_source = os.path.join(os.path.dirname(__file__),
                                 'chromeDevenvPoller__.js')
    files.read(poller_source, relpath='devenvPoller__.js')
    files['devenvVersion__'] = str(random.random())

    if metadata.has_option('general', 'testScripts'):
        files['qunit/index.html'] = createScriptPage(
            params, 'testIndex.html.tmpl', ('general', 'testScripts')
        )
336
def createBuild(baseDir, type='chrome', outFile=None, buildNum=None, releaseBuild=False, keyFile=None, devenv=False):
    """Create an extension build in *baseDir* and write it to *outFile*.

    Reads the type-specific metadata, assembles and post-processes the
    file set (bundles, preprocessing, locale imports, manifest, Chrome
    translation fixes, optional devenv helpers), then zips it and — when
    *keyFile* is given — signs the package as a CRX.

    type: target platform ('chrome', 'edge' or 'gecko').
    outFile: output path; derived from metadata/version when omitted.
    buildNum / releaseBuild: control the generated version number.
    devenv: include development-environment helper files.
    """
    metadata = readMetadata(baseDir, type)
    version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)

    # PEP 8: compare against None with 'is', not '=='.
    if outFile is None:
        if type == 'gecko':
            file_extension = 'xpi'
        else:
            file_extension = 'crx' if keyFile else 'zip'
        outFile = getDefaultFileName(metadata, version, file_extension)

    params = {
        'type': type,
        'baseDir': baseDir,
        'releaseBuild': releaseBuild,
        'version': version,
        'devenv': devenv,
        'metadata': metadata,
    }

    mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
    files = Files(getPackageFiles(params), getIgnoredFiles(params),
                  process=lambda path, data: processFile(path, data, params))

    files.readMappedFiles(mapped)
    # Skip mapped options when reading the base directory to avoid
    # overwriting the explicit mappings.
    files.read(baseDir, skip=[opt for opt, _ in mapped])

    if metadata.has_section('bundles'):
        create_bundles(params, files)

    if metadata.has_section('preprocess'):
        files.preprocess(
            [f for f, _ in metadata.items('preprocess')],
            {'needsExt': True}
        )

    if metadata.has_section('import_locales'):
        import_locales(params, files)

    files['manifest.json'] = createManifest(params, files)
    if type == 'chrome':
        fix_translations_for_chrome(files)

    if devenv:
        add_devenv_requirements(files, metadata, params)

    zipdata = files.zipToString()
    signature = None
    pubkey = None
    if keyFile is not None:
        signature = signBinary(zipdata, keyFile)
        pubkey = getPublicKey(keyFile)
    writePackage(outFile, pubkey, signature, zipdata)
LEFTRIGHT

Powered by Google App Engine
This is Rietveld