Rietveld Code Review Tool

Delta Between Two Patch Sets: packagerChrome.py

Issue 29501558: Issue 5383 - Add tests for the Chrome and Firefox packagers (Closed)
Left Patch Set: Readme, difflib, buildnum Created Sept. 13, 2017, 1:29 p.m.
Right Patch Set: Addressing Vasily's comments Created Oct. 22, 2017, 11:11 a.m.
1 # This Source Code Form is subject to the terms of the Mozilla Public 1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this 2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
4 4
5 import errno 5 import errno
6 import glob
6 import io 7 import io
7 import json 8 import json
8 import os 9 import os
9 import re 10 import re
10 from StringIO import StringIO
11 import struct 11 import struct
12 import subprocess
12 import sys 13 import sys
13 import collections 14 import random
14 15
15 from packager import (readMetadata, getDefaultFileName, getBuildVersion, 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion,
16 getTemplate, Files) 17 getTemplate, Files)
17 18
18 defaultLocale = 'en_US' 19 defaultLocale = 'en_US'
19 20
20 21
21 def getIgnoredFiles(params): 22 def getIgnoredFiles(params):
22 return {'store.description'} 23 return {'store.description'}
23 24
(...skipping 14 matching lines...)
38 39
39 def processFile(path, data, params): 40 def processFile(path, data, params):
40 # We don't change anything yet, this function currently only exists here so 41 # We don't change anything yet, this function currently only exists here so
41 # that it can be overridden if necessary. 42 # that it can be overridden if necessary.
42 return data 43 return data
43 44
44 45
45 def makeIcons(files, filenames): 46 def makeIcons(files, filenames):
46 icons = {} 47 icons = {}
47 for filename in filenames: 48 for filename in filenames:
48 width, height = struct.unpack('>ii', files[filename][16:24]) 49 try:
Sebastian Noack 2017/09/13 19:18:29 I wonder whether it would be worth to check for th
tlucas 2017/09/14 09:39:53 Do we strictly require the icons to be pngs? If ye
Sebastian Noack 2017/09/14 16:51:59 We only use PNG. But technically, some other forma
50 magic, width, height = struct.unpack_from('>8s8xii',
51 files[filename])
52 except struct.error:
53 magic = None
54 if magic != '\x89PNG\r\n\x1a\n':
55 raise Exception(filename + ' is no valid PNG.')
49 if(width != height): 56 if(width != height):
50 print >>sys.stderr, 'Warning: %s size is %ix%i, icon should be square' % (filename, width, height) 57 print >>sys.stderr, 'Warning: %s size is %ix%i, icon should be square' % (filename, width, height)
51 icons[width] = filename 58 icons[width] = filename
52 return icons 59 return icons
53 60
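The reviewer thread above concerns validating the PNG signature before reading the dimensions. For context, here is a minimal standalone sketch (not part of the patch) of the header layout the new check relies on, using the same struct format string and Python 2 string literals as the rest of this file:

    import struct

    PNG_SIGNATURE = '\x89PNG\r\n\x1a\n'

    def read_png_size(data):
        # '>8s8xii': 8-byte signature, skip 8 bytes (IHDR chunk length and
        # type), then the 4-byte big-endian width and height fields.
        magic, width, height = struct.unpack_from('>8s8xii', data)
        if magic != PNG_SIGNATURE:
            raise ValueError('not a valid PNG')
        return width, height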
54 61
55 def createScriptPage(params, template_name, script_option): 62 def createScriptPage(params, template_name, script_option):
56 template = getTemplate(template_name, autoEscape=True) 63 template = getTemplate(template_name, autoEscape=True)
57 return template.render( 64 return template.render(
58 basename=params['metadata'].get('general', 'basename'), 65 basename=params['metadata'].get('general', 'basename'),
(...skipping 69 matching lines...)
128 # Normalize JSON structure 135 # Normalize JSON structure
129 licenseComment = re.compile(r'/\*.*?\*/', re.S) 136 licenseComment = re.compile(r'/\*.*?\*/', re.S)
130 data = json.loads(re.sub(licenseComment, '', manifest, 1)) 137 data = json.loads(re.sub(licenseComment, '', manifest, 1))
131 if '_dummy' in data: 138 if '_dummy' in data:
132 del data['_dummy'] 139 del data['_dummy']
133 manifest = json.dumps(data, sort_keys=True, indent=2) 140 manifest = json.dumps(data, sort_keys=True, indent=2)
134 141
135 return manifest.encode('utf-8') 142 return manifest.encode('utf-8')
136 143
137 144
138 def convertJS(params, files):
139 output_files = collections.OrderedDict()
140 args = {}
141
142 for item in params['metadata'].items('convert_js'):
143 name, value = item
144 filename, arg = re.search(r'^(.*?)(?:\[(.*)\])?$', name).groups()
145 if arg is None:
146 output_files[filename] = (value.split(), item.source)
147 else:
148 args.setdefault(filename, {})[arg] = value
149
150 template = getTemplate('modules.js.tmpl')
151
152 for filename, (input_files, origin) in output_files.iteritems():
153 if '/' in filename and not files.isIncluded(filename):
154 continue
155
156 current_args = args.get(filename, {})
157 current_args['autoload'] = [module for module in
158 current_args.get('autoload', '').split(',')
159 if module != '']
160
161 base_dir = os.path.dirname(origin)
162 modules = []
163
164 for input_filename in input_files:
165 module_name = os.path.splitext(os.path.basename(input_filename))[0]
166 prefix = os.path.basename(os.path.dirname(input_filename))
167 if prefix != 'lib':
168 module_name = '{}_{}'.format(prefix, module_name)
169 with open(os.path.join(base_dir, input_filename), 'r') as file:
170 modules.append((module_name, file.read().decode('utf-8')))
171 files.pop(input_filename, None)
172
173 files[filename] = template.render(
174 args=current_args,
175 basename=params['metadata'].get('general', 'basename'),
176 modules=modules,
177 type=params['type'],
178 version=params['metadata'].get('general', 'version')
179 ).encode('utf-8')
180
181
182 def toJson(data): 145 def toJson(data):
183 return json.dumps( 146 return json.dumps(
184 data, ensure_ascii=False, sort_keys=True, 147 data, ensure_ascii=False, sort_keys=True,
185 indent=2, separators=(',', ': ') 148 indent=2, separators=(',', ': ')
186 ).encode('utf-8') + '\n' 149 ).encode('utf-8') + '\n'
187 150
188 151
189 def import_string_webext(data, key, source): 152 def create_bundles(params, files):
190 """Import a single translation from the source dictionary into data""" 153 base_extension_path = params['baseDir']
191 data[key] = source 154 info_templates = {
192 155 'chrome': 'chromeInfo.js.tmpl',
193 156 'edge': 'edgeInfo.js.tmpl',
194 def import_string_gecko(data, key, value): 157 'gecko': 'geckoInfo.js.tmpl'
195 """Import Gecko-style locales into data. 158 }
196 159
197 Only sets {'message': value} in the data-dictionary, after stripping 160 # Historically we didn't use relative paths when requiring modules, so in
198 undesired Gecko-style access keys. 161 # order for webpack to know where to find them we need to pass in a list of
199 """ 162 # resolve paths. Going forward we should always use relative paths, once we
200 match = re.search(r'^(.*?)\s*\(&.\)$', value) 163 # do that consistently this can be removed. See issues 5760, 5761 and 5762.
201 if match: 164 resolve_paths = [os.path.join(base_extension_path, dir, 'lib')
202 value = match.group(1) 165 for dir in ['', 'adblockpluscore', 'adblockplusui']]
203 else: 166
204 index = value.find('&') 167 info_template = getTemplate(info_templates[params['type']])
205 if index >= 0: 168 info_module = info_template.render(
206 value = value[0:index] + value[index + 1:] 169 basename=params['metadata'].get('general', 'basename'),
207 170 version=params['metadata'].get('general', 'version')
208 data[key] = {'message': value} 171 ).encode('utf-8')
172
173 configuration = {
174 'bundles': [],
175 'extension_path': base_extension_path,
176 'info_module': info_module,
177 'resolve_paths': resolve_paths,
178 }
179
180 for item in params['metadata'].items('bundles'):
181 name, value = item
182 base_item_path = os.path.dirname(item.source)
183
184 bundle_file = os.path.relpath(os.path.join(base_item_path, name),
185 base_extension_path)
186 entry_files = [os.path.join(base_item_path, module_path)
187 for module_path in value.split()]
188 configuration['bundles'].append({
189 'bundle_name': bundle_file,
190 'entry_points': entry_files,
191 })
192
193 cmd = ['node', os.path.join(os.path.dirname(__file__), 'webpack_runner.js')]
194 process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
195 stdin=subprocess.PIPE)
196 output = process.communicate(input=toJson(configuration))[0]
197 if process.returncode != 0:
198 raise subprocess.CalledProcessError(process.returncode, cmd=cmd)
199 output = json.loads(output)
200
201 # Clear the mapping for any files included in a bundle, to avoid them being
202 # duplicated in the build.
203 for to_ignore in output['included']:
204 files.pop(to_ignore, None)
205
206 for bundle in output['files']:
207 files[bundle] = output['files'][bundle].encode('utf-8')
209 208
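For orientation, assuming a hypothetical [bundles] metadata entry such as lib/adblockplus.js = lib/init.js, the configuration dict that create_bundles() serializes with toJson() and pipes to webpack_runner.js on stdin would look roughly like this (the keys follow the code above; all paths are illustrative):

    configuration = {
        'extension_path': '/path/to/extension',          # params['baseDir']
        'info_module': '<rendered chromeInfo.js.tmpl>',  # type-specific info module
        'resolve_paths': ['/path/to/extension/lib',
                          '/path/to/extension/adblockpluscore/lib',
                          '/path/to/extension/adblockplusui/lib'],
        'bundles': [{
            'bundle_name': 'lib/adblockplus.js',
            'entry_points': ['/path/to/extension/lib/init.js'],
        }],
    }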
210 209
211 def import_locales(params, files): 210 def import_locales(params, files):
212 import localeTools 211 for item in params['metadata'].items('import_locales'):
213 212 filename, keys = item
214 # FIXME: localeTools doesn't use real Chrome locales, it uses dash as 213 for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source),
215 # separator instead. 214 *filename.split('/'))):
216 convert_locale_code = lambda code: code.replace('-', '_') 215 locale = sourceFile.split(os.path.sep)[-2]
217 216 targetFile = os.path.join('_locales', locale, 'messages.json')
218 # We need to map Chrome locales to Gecko locales. Start by mapping Chrome 217 data = json.loads(files.get(targetFile, '{}').decode('utf-8'))
219 # locales to themselves, merely with the dash as separator.
220 locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales}
221
222 # Convert values to Crowdin locales first (use Chrome => Crowdin mapping).
223 for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems():
224 locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale
225
226 # Now convert values to Gecko locales (use Gecko => Crowdin mapping).
227 reverse_mapping = {v: k for k, v in locale_mapping.iteritems()}
228 for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems():
229 if crowdin_locale in reverse_mapping:
230 locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale
231
232 for target, source in locale_mapping.iteritems():
233 targetFile = '_locales/%s/messages.json' % target
234 if not targetFile in files:
235 continue
236
237 for item in params['metadata'].items('import_locales'):
238 fileName, keys = item
239 parts = map(lambda n: source if n == '*' else n, fileName.split('/'))
240 sourceFile = os.path.join(os.path.dirname(item.source), *parts)
241 incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete')
242 if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker):
243 continue
244
245 data = json.loads(files[targetFile].decode('utf-8'))
246 218
247 try: 219 try:
248 # The WebExtensions (.json) and Gecko format provide 220 with io.open(sourceFile, 'r', encoding='utf-8') as handle:
249 # translations differently and/or provide additional 221 sourceData = json.load(handle)
250 # information like e.g. "placeholders". We want to adhere to
251 # that and preserve the additional info.
252 if sourceFile.endswith('.json'):
253 with io.open(sourceFile, 'r', encoding='utf-8') as handle:
254 sourceData = json.load(handle)
255 import_string = import_string_webext
256 else:
257 sourceData = localeTools.readFile(sourceFile)
258 import_string = import_string_gecko
259 222
260 # Resolve wildcard imports 223 # Resolve wildcard imports
261 if keys == '*' or keys == '=*': 224 if keys == '*':
262 importList = sourceData.keys() 225 importList = sourceData.keys()
263 importList = filter(lambda k: not k.startswith('_'), importList) 226 importList = filter(lambda k: not k.startswith('_'), importList)
264 if keys == '=*':
265 importList = map(lambda k: '=' + k, importList)
266 keys = ' '.join(importList) 227 keys = ' '.join(importList)
267 228
268 for stringID in keys.split(): 229 for stringID in keys.split():
269 noMangling = False
270 if stringID.startswith('='):
271 stringID = stringID[1:]
272 noMangling = True
273
274 if stringID in sourceData: 230 if stringID in sourceData:
275 if noMangling: 231 if stringID in data:
276 key = re.sub(r'\W', '_', stringID) 232 print ('Warning: locale string {} defined multiple'
277 else: 233 ' times').format(stringID)
278 key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID) 234
279 if key in data: 235 data[stringID] = sourceData[stringID]
280 print 'Warning: locale string %s defined multiple times' % key
281
282 import_string(data, key, sourceData[stringID])
283 except Exception as e: 236 except Exception as e:
284 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e) 237 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e)
285 238
286 files[targetFile] = toJson(data) 239 files[targetFile] = toJson(data)
287 240
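Assuming a hypothetical import_locales metadata key such as adblockplusui/locale/*/desc.json, the glob-based lookup added above resolves one source file per locale directory, with the locale code taken from the second-to-last path component. A minimal sketch under that assumption:

    import glob
    import os

    filename = 'adblockplusui/locale/*/desc.json'   # assumed metadata key
    base_dir = '/path/to/extension'                 # assumed dirname of item.source

    for sourceFile in glob.glob(os.path.join(base_dir, *filename.split('/'))):
        locale = sourceFile.split(os.path.sep)[-2]  # e.g. 'de', 'es_MX'
        targetFile = os.path.join('_locales', locale, 'messages.json')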
288 241
289 def truncate(text, length_limit): 242 def truncate(text, length_limit):
290 if len(text) <= length_limit: 243 if len(text) <= length_limit:
291 return text 244 return text
292 return text[:length_limit - 1].rstrip() + u'\u2026' 245 return text[:length_limit - 1].rstrip() + u'\u2026'
293 246
294 247
295 def fixTranslationsForCWS(files): 248 def fix_translations_for_chrome(files):
296 # Chrome Web Store requires messages used in manifest.json to be present in
297 # all languages. It also enforces length limits for extension names and
298 # descriptions.
299 defaults = {} 249 defaults = {}
300 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) 250 data = json.loads(files['_locales/%s/messages.json' % defaultLocale])
301 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): 251 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']):
302 name = match.group(1) 252 name = match.group(1)
303 defaults[name] = data[name] 253 defaults[name] = data[name]
304 254
305 limits = {} 255 limits = {}
306 manifest = json.loads(files['manifest.json']) 256 manifest = json.loads(files['manifest.json'])
307 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): 257 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)):
308 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) 258 match = re.search(r'__MSG_(\S+)__', manifest.get(key, ''))
309 if match: 259 if match:
310 limits[match.group(1)] = limit 260 limits[match.group(1)] = limit
311 261
312 for filename in files: 262 for path in list(files):
313 if not filename.startswith('_locales/') or not filename.endswith('/messages.json'): 263 match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path)
264 if not match:
314 continue 265 continue
315 266
316 data = json.loads(files[filename]) 267 # The Chrome Web Store requires messages used in manifest.json to
317 for name, info in defaults.iteritems(): 268 # be present in all languages, and enforces length limits on
318 data.setdefault(name, info) 269 # extension name and description.
319 for name, limit in limits.iteritems(): 270 is_latam, is_mexican, filename = match.groups()
320 if name in data: 271 if filename == 'messages.json':
321 data[name]['message'] = truncate(data[name]['message'], limit) 272 data = json.loads(files[path])
322 files[filename] = toJson(data) 273 for name, info in defaults.iteritems():
274 data.setdefault(name, info)
275 for name, limit in limits.iteritems():
276 info = data.get(name)
277 if info:
278 info['message'] = truncate(info['message'], limit)
279 files[path] = toJson(data)
280
281 # Chrome combines Latin American dialects of Spanish into es-419.
282 if is_latam:
283 data = files.pop(path)
284 if is_mexican:
285 files['_locales/es_419/' + filename] = data
323 286
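To illustrate the new locale-path regex, here is a quick standalone sketch with sample paths (the inputs are assumptions, not taken from the patch): es_AR and es_CL match the Latin American branch and are dropped, es_MX is additionally captured and moved to es_419, and every other locale falls through to the generic branch.

    import re

    pattern = r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)'

    for path in ('_locales/es_AR/messages.json',
                 '_locales/es_MX/messages.json',
                 '_locales/de/messages.json'):
        is_latam, is_mexican, filename = re.search(pattern, path).groups()
        # es_AR -> ('AR', None, 'messages.json'): removed from the build
        # es_MX -> ('MX', 'MX', 'messages.json'): moved to _locales/es_419/
        # de    -> (None, None, 'messages.json'): left untouched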
324 287
325 def signBinary(zipdata, keyFile): 288 def signBinary(zipdata, keyFile):
326 from Crypto.Hash import SHA 289 from Crypto.Hash import SHA
327 from Crypto.PublicKey import RSA 290 from Crypto.PublicKey import RSA
328 from Crypto.Signature import PKCS1_v1_5 291 from Crypto.Signature import PKCS1_v1_5
329 292
330 try: 293 try:
331 with open(keyFile, 'rb') as file: 294 with open(keyFile, 'rb') as file:
332 key = RSA.importKey(file.read()) 295 key = RSA.importKey(file.read())
(...skipping 18 matching lines...)
351 file = open(outputFile, 'wb') 314 file = open(outputFile, 'wb')
352 else: 315 else:
353 file = outputFile 316 file = outputFile
354 if pubkey != None and signature != None: 317 if pubkey != None and signature != None:
355 file.write(struct.pack('<4sIII', 'Cr24', 2, len(pubkey), len(signature))) 318 file.write(struct.pack('<4sIII', 'Cr24', 2, len(pubkey), len(signature)))
356 file.write(pubkey) 319 file.write(pubkey)
357 file.write(signature) 320 file.write(signature)
358 file.write(zipdata) 321 file.write(zipdata)
359 322
360 323
324 def add_devenv_requirements(files, metadata, params):
325 files.read(
326 os.path.join(os.path.dirname(__file__), 'chromeDevenvPoller__.js'),
327 relpath='devenvPoller__.js',
328 )
329 files['devenvVersion__'] = str(random.random())
330
331 if metadata.has_option('general', 'testScripts'):
332 files['qunit/index.html'] = createScriptPage(
333 params, 'testIndex.html.tmpl', ('general', 'testScripts')
334 )
335
336
361 def createBuild(baseDir, type='chrome', outFile=None, buildNum=None, releaseBuild=False, keyFile=None, devenv=False): 337 def createBuild(baseDir, type='chrome', outFile=None, buildNum=None, releaseBuild=False, keyFile=None, devenv=False):
362 metadata = readMetadata(baseDir, type) 338 metadata = readMetadata(baseDir, type)
363 version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum) 339 version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)
364 340
365 if outFile == None: 341 if outFile == None:
366 if type == 'gecko-webext': 342 if type == 'gecko':
367 file_extension = 'xpi' 343 file_extension = 'xpi'
368 else: 344 else:
369 file_extension = 'crx' if keyFile else 'zip' 345 file_extension = 'crx' if keyFile else 'zip'
370 outFile = getDefaultFileName(metadata, version, file_extension) 346 outFile = getDefaultFileName(metadata, version, file_extension)
371 347
372 params = { 348 params = {
373 'type': type, 349 'type': type,
374 'baseDir': baseDir, 350 'baseDir': baseDir,
375 'releaseBuild': releaseBuild, 351 'releaseBuild': releaseBuild,
376 'version': version, 352 'version': version,
377 'devenv': devenv, 353 'devenv': devenv,
378 'metadata': metadata, 354 'metadata': metadata,
379 } 355 }
380 356
381 mapped = metadata.items('mapping') if metadata.has_section('mapping') else [] 357 mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
382 files = Files(getPackageFiles(params), getIgnoredFiles(params), 358 files = Files(getPackageFiles(params), getIgnoredFiles(params),
383 process=lambda path, data: processFile(path, data, params)) 359 process=lambda path, data: processFile(path, data, params))
384 360
385 files.readMappedFiles(mapped) 361 files.readMappedFiles(mapped)
386 files.read(baseDir, skip=[opt for opt, _ in mapped]) 362 files.read(baseDir, skip=[opt for opt, _ in mapped])
387 363
388 if metadata.has_section('convert_js'): 364 if metadata.has_section('bundles'):
389 convertJS(params, files) 365 create_bundles(params, files)
390 366
391 if metadata.has_section('preprocess'): 367 if metadata.has_section('preprocess'):
392 files.preprocess( 368 files.preprocess(
393 [f for f, _ in metadata.items('preprocess')], 369 [f for f, _ in metadata.items('preprocess')],
394 {'needsExt': True} 370 {'needsExt': True}
395 ) 371 )
396 372
397 if metadata.has_section('import_locales'): 373 if metadata.has_section('import_locales'):
398 import_locales(params, files) 374 import_locales(params, files)
399 375
400 files['manifest.json'] = createManifest(params, files) 376 files['manifest.json'] = createManifest(params, files)
401 if type == 'chrome': 377 if type == 'chrome':
402 fixTranslationsForCWS(files) 378 fix_translations_for_chrome(files)
403 379
404 if devenv: 380 if devenv:
405 import buildtools 381 add_devenv_requirements(files, metadata, params)
406 import random
407 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js')
408 files['devenvVersion__'] = str(random.random())
409
410 if metadata.has_option('general', 'testScripts'):
411 files['qunit/index.html'] = createScriptPage(
412 params, 'testIndex.html.tmpl', ('general', 'testScripts')
413 )
414 382
415 zipdata = files.zipToString() 383 zipdata = files.zipToString()
416 signature = None 384 signature = None
417 pubkey = None 385 pubkey = None
418 if keyFile != None: 386 if keyFile != None:
419 signature = signBinary(zipdata, keyFile) 387 signature = signBinary(zipdata, keyFile)
420 pubkey = getPublicKey(keyFile) 388 pubkey = getPublicKey(keyFile)
421 writePackage(outFile, pubkey, signature, zipdata) 389 writePackage(outFile, pubkey, signature, zipdata)
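Assuming the usual metadata.chrome layout, a hypothetical development invocation of createBuild() might look like this (the module name, paths, and flag values are illustrative only):

    from packagerChrome import createBuild

    createBuild(
        '/path/to/adblockpluschrome',       # baseDir containing metadata.chrome
        type='chrome',
        outFile='adblockpluschrome-devenv.zip',
        releaseBuild=False,
        devenv=True,                        # adds devenvPoller__.js and the qunit page
    )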