
Delta Between Two Patch Sets: packagerChrome.py

Issue 29549786: Issue 5535 - Replace our module system with webpack (Closed)
Left Patch Set: Fixed spelling mistake (created Sept. 19, 2017, 2:49 p.m.)
Right Patch Set: Addressed final nits (created Oct. 10, 2017, 5:02 p.m.)
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

 import errno
+import glob
 import io
 import json
 import os
 import re
-import shutil
 from StringIO import StringIO
 import struct
 import subprocess
 import sys
-import tempfile

 from packager import (readMetadata, getDefaultFileName, getBuildVersion,
                       getTemplate, Files)

 defaultLocale = 'en_US'


 def getIgnoredFiles(params):
     return {'store.description'}

(...skipping 108 matching lines...)
     # Normalize JSON structure
     licenseComment = re.compile(r'/\*.*?\*/', re.S)
     data = json.loads(re.sub(licenseComment, '', manifest, 1))
     if '_dummy' in data:
         del data['_dummy']
     manifest = json.dumps(data, sort_keys=True, indent=2)

     return manifest.encode('utf-8')


+def toJson(data):
+    return json.dumps(
+        data, ensure_ascii=False, sort_keys=True,
+        indent=2, separators=(',', ': ')
+    ).encode('utf-8') + '\n'
+
+
 def create_bundles(params, files):
     base_extension_path = params['baseDir']
     info_templates = {
         'chrome': 'chromeInfo.js.tmpl',
         'edge': 'edgeInfo.js.tmpl',
         'gecko-webext': 'geckoInfo.js.tmpl'
     }
-    info_module = None
-
-    # Once we use relative paths when requiring modules we can remove these,
-    # but in the mean time Webpack needs to know where to look.
-    resolve_paths = ' '.join(['./lib',
-                              './adblockpluscore/lib',
-                              './adblockplusui/lib'])
-    try:
-        temp_dir = tempfile.mkdtemp()
-        template = getTemplate(info_templates[params['type']])
-        with tempfile.NamedTemporaryFile(delete=False) as info_file:
    tlucas 2017/09/22 10:27:07: You are creating two separate temporary instances …
    kzar 2017/09/22 14:01:51: Good idea, we might as well have everything delete …
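The thread above is about the left-hand code juggling two separate temporary resources: the directory from tempfile.mkdtemp() and the NamedTemporaryFile holding the generated info module, each needing its own cleanup. A minimal sketch of the consolidation being suggested, assuming the info module text has already been rendered; the right-hand patch set ultimately goes further and drops temporary files altogether, streaming everything to webpack_runner.js instead:

    import os
    import shutil
    import tempfile

    rendered_info_module = b'/* rendered info module */'  # placeholder for illustration
    temp_dir = tempfile.mkdtemp()
    try:
        # Keep the info module inside the same temporary directory, so the
        # single rmtree() below cleans up the info module and the webpack
        # output alike.
        info_path = os.path.join(temp_dir, 'info.js')
        with open(info_path, 'wb') as info_file:
            info_file.write(rendered_info_module)
        # ... invoke webpack here, pointing INFO_PATH at info_path and
        # OUTPUT_PATH at temp_dir ...
    finally:
        shutil.rmtree(temp_dir)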
-            info_file.write(
-                template.render(
-                    basename=params['metadata'].get('general', 'basename'),
-                    version=params['metadata'].get('general', 'version')
-                ).encode('utf-8')
-            )
-            info_module = info_file.name
-
-        for item in params['metadata'].items('bundles'):
-            name, value = item
-            base_item_path = os.path.dirname(item.source)
-
-            bundle_file = os.path.relpath(os.path.join(base_item_path, name),
-                                          base_extension_path)
-            entry_files = [
-                os.path.join(
-                    '.',
-                    os.path.relpath(os.path.join(base_item_path, module_path),
-                                    base_extension_path)
-                )
-                for module_path in value.split()
-            ]
-            subprocess.check_call(
-                ['npm', 'run-script', 'webpack'],
-                cwd=os.path.dirname(__file__),
-                env={
-                    'EXTENSION_PATH': base_extension_path,
-                    'ENTRY_POINTS': ' '.join(entry_files),
-                    'OUTPUT_PATH': temp_dir,
-                    'BUNDLE_NAME': bundle_file,
-                    'RESOLVE_PATHS': resolve_paths,
-                    'INFO_PATH': info_module,
-                    'PATH': os.environ['PATH']
-                }
-            )
-            for file_name in [bundle_file, bundle_file + '.map']:
-                with open(os.path.join(temp_dir, file_name), 'r') as f:
-                    files[file_name] = f.read()
-    finally:
-        if info_module:
-            os.remove(info_module)
-        shutil.rmtree(temp_dir)
-
-
-def toJson(data):
-    return json.dumps(
-        data, ensure_ascii=False, sort_keys=True,
-        indent=2, separators=(',', ': ')
-    ).encode('utf-8') + '\n'
-
-
-def import_string_webext(data, key, source):
-    """Import a single translation from the source dictionary into data"""
-    data[key] = source
-
-
-def import_string_gecko(data, key, value):
-    """Import Gecko-style locales into data.
-
-    Only sets {'message': value} in the data-dictionary, after stripping
-    undesired Gecko-style access keys.
-    """
-    match = re.search(r'^(.*?)\s*\(&.\)$', value)
-    if match:
-        value = match.group(1)
-    else:
-        index = value.find('&')
-        if index >= 0:
-            value = value[0:index] + value[index + 1:]
-
-    data[key] = {'message': value}
+
+    # Historically we didn't use relative paths when requiring modules, so in
+    # order for webpack to know where to find them we need to pass in a list of
+    # resolve paths. Going forward we should always use relative paths, once we
+    # do that consistently this can be removed. See issues 5760, 5761 and 5762.
+    resolve_paths = [os.path.join(base_extension_path, dir, 'lib')
+                     for dir in ['', 'adblockpluscore', 'adblockplusui']]
+
+    info_template = getTemplate(info_templates[params['type']])
+    info_module = info_template.render(
+        basename=params['metadata'].get('general', 'basename'),
+        version=params['metadata'].get('general', 'version')
+    ).encode('utf-8')
+
+    configuration = {
+        'bundles': [],
+        'extension_path': base_extension_path,
+        'info_module': info_module,
+        'resolve_paths': resolve_paths,
+    }
+
+    for item in params['metadata'].items('bundles'):
+        name, value = item
+        base_item_path = os.path.dirname(item.source)
+
+        bundle_file = os.path.relpath(os.path.join(base_item_path, name),
+                                      base_extension_path)
+        entry_files = [os.path.join(base_item_path, module_path)
+                       for module_path in value.split()]
+        configuration['bundles'].append({
+            'bundle_name': bundle_file,
+            'entry_points': entry_files,
+        })
+
+    cmd = ['node', os.path.join(os.path.dirname(__file__), 'webpack_runner.js')]
+    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                               stdin=subprocess.PIPE)
+    output = process.communicate(input=toJson(configuration))[0]
+    if process.returncode != 0:
+        raise subprocess.CalledProcessError(process.returncode, cmd=cmd)
+
+    bundles = json.loads(output)
+    for bundle in bundles:
+        files[bundle] = bundles[bundle].encode('utf-8')
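In the rewritten create_bundles there is no npm invocation and no temporary directory: everything webpack needs is collected into a single configuration dictionary, serialized with toJson() and piped to webpack_runner.js over stdin, and a JSON object mapping bundle names to bundle contents is read back from stdout. Roughly what the runner receives for a single hypothetical metadata [bundles] entry (all paths and names below are made up for illustration):

    configuration = {
        'extension_path': '/src/adblockpluschrome',
        'info_module': '/* rendered info module source */',
        'resolve_paths': ['/src/adblockpluschrome/lib',
                          '/src/adblockpluschrome/adblockpluscore/lib',
                          '/src/adblockpluschrome/adblockplusui/lib'],
        'bundles': [{
            'bundle_name': 'lib/adblockplus.js',
            'entry_points': ['/src/adblockpluschrome/lib/main.js'],
        }],
    }
    # toJson(configuration) is written to the runner's stdin; the runner is
    # expected to print a JSON object such as
    # {"lib/adblockplus.js": "<bundled code>", ...}, which create_bundles
    # then copies into the Files collection.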


 def import_locales(params, files):
-    import localeTools
-
-    # FIXME: localeTools doesn't use real Chrome locales, it uses dash as
-    # separator instead.
-    convert_locale_code = lambda code: code.replace('-', '_')
-
-    # We need to map Chrome locales to Gecko locales. Start by mapping Chrome
-    # locales to themselves, merely with the dash as separator.
-    locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales}
-
-    # Convert values to Crowdin locales first (use Chrome => Crowdin mapping).
-    for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems():
-        locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale
-
-    # Now convert values to Gecko locales (use Gecko => Crowdin mapping).
-    reverse_mapping = {v: k for k, v in locale_mapping.iteritems()}
-    for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems():
-        if crowdin_locale in reverse_mapping:
-            locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale
-
-    for target, source in locale_mapping.iteritems():
-        targetFile = '_locales/%s/messages.json' % target
-        if not targetFile in files:
-            continue
-
-        for item in params['metadata'].items('import_locales'):
-            fileName, keys = item
-            parts = map(lambda n: source if n == '*' else n, fileName.split('/'))
-            sourceFile = os.path.join(os.path.dirname(item.source), *parts)
-            incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete')
-            if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker):
-                continue
-
-            data = json.loads(files[targetFile].decode('utf-8'))
+    for item in params['metadata'].items('import_locales'):
+        filename, keys = item
+        for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source),
+                                                 *filename.split('/'))):
+            locale = sourceFile.split(os.path.sep)[-2]
+            targetFile = os.path.join('_locales', locale, 'messages.json')
+            data = json.loads(files.get(targetFile, '{}').decode('utf-8'))
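The rewritten import_locales no longer maps locale codes through localeTools; it globs the paths listed in the metadata's [import_locales] section, where the wildcard component stands for the locale directory, and derives the locale code from the matched path. A small illustration with a made-up metadata entry and paths (all hypothetical):

    # Hypothetical entry:  adblockplusui/locale/*/common.json = cancel ok
    # With the metadata file living in /src/ext, the glob pattern becomes
    # /src/ext/adblockplusui/locale/*/common.json; one match might be:
    import os

    sourceFile = '/src/ext/adblockplusui/locale/de/common.json'
    locale = sourceFile.split(os.path.sep)[-2]
    targetFile = os.path.join('_locales', locale, 'messages.json')
    # locale == 'de', targetFile == '_locales/de/messages.json'; the listed
    # keys ("cancel", "ok") are then copied from common.json into that file.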

             try:
-                # The WebExtensions (.json) and Gecko format provide
-                # translations differently and/or provide additional
-                # information like e.g. "placeholders". We want to adhere to
-                # that and preserve the addtional info.
-                if sourceFile.endswith('.json'):
-                    with io.open(sourceFile, 'r', encoding='utf-8') as handle:
-                        sourceData = json.load(handle)
-                    import_string = import_string_webext
-                else:
-                    sourceData = localeTools.readFile(sourceFile)
-                    import_string = import_string_gecko
+                with io.open(sourceFile, 'r', encoding='utf-8') as handle:
+                    sourceData = json.load(handle)

                 # Resolve wildcard imports
-                if keys == '*' or keys == '=*':
+                if keys == '*':
                     importList = sourceData.keys()
                     importList = filter(lambda k: not k.startswith('_'), importList)
-                    if keys == '=*':
-                        importList = map(lambda k: '=' + k, importList)
                     keys = ' '.join(importList)

                 for stringID in keys.split():
-                    noMangling = False
-                    if stringID.startswith('='):
-                        stringID = stringID[1:]
-                        noMangling = True
-
                     if stringID in sourceData:
-                        if noMangling:
-                            key = re.sub(r'\W', '_', stringID)
-                        else:
-                            key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID)
-                        if key in data:
-                            print 'Warning: locale string %s defined multiple times' % key
-
-                        import_string(data, key, sourceData[stringID])
+                        if stringID in data:
+                            print ('Warning: locale string {} defined multiple'
+                                   ' times').format(stringID)
+
+                        data[stringID] = sourceData[stringID]
             except Exception as e:
                 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e)

             files[targetFile] = toJson(data)


 def truncate(text, length_limit):
     if len(text) <= length_limit:
         return text
     return text[:length_limit - 1].rstrip() + u'\u2026'


-def fixTranslationsForCWS(files):
-    # Chrome Web Store requires messages used in manifest.json to be present in
-    # all languages. It also enforces length limits for extension names and
-    # descriptions.
+def fix_translations_for_chrome(files):
     defaults = {}
     data = json.loads(files['_locales/%s/messages.json' % defaultLocale])
     for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']):
         name = match.group(1)
         defaults[name] = data[name]

     limits = {}
     manifest = json.loads(files['manifest.json'])
     for key, limit in (('name', 45), ('description', 132), ('short_name', 12)):
         match = re.search(r'__MSG_(\S+)__', manifest.get(key, ''))
         if match:
             limits[match.group(1)] = limit

-    for filename in files:
-        if not filename.startswith('_locales/') or not filename.endswith('/messages.json'):
+    for path in list(files):
+        match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path)
+        if not match:
             continue

-        data = json.loads(files[filename])
-        for name, info in defaults.iteritems():
-            data.setdefault(name, info)
-        for name, limit in limits.iteritems():
-            if name in data:
-                data[name]['message'] = truncate(data[name]['message'], limit)
-        files[filename] = toJson(data)
+        # The Chrome Web Store requires messages used in manifest.json to
+        # be present in all languages, and enforces length limits on
+        # extension name and description.
+        is_latam, is_mexican, filename = match.groups()
+        if filename == 'messages.json':
+            data = json.loads(files[path])
+            for name, info in defaults.iteritems():
+                data.setdefault(name, info)
+            for name, limit in limits.iteritems():
+                info = data.get(name)
+                if info:
+                    info['message'] = truncate(info['message'], limit)
+            files[path] = toJson(data)
+
+        # Chrome combines Latin American dialects of Spanish into es-419.
+        if is_latam:
+            data = files.pop(path)
+            if is_mexican:
+                files['_locales/es_419/' + filename] = data
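The regular expression in fix_translations_for_chrome does two jobs at once: it strips the _locales/<locale>/ prefix to recover the file name, and it captures whether the locale is a Latin American Spanish dialect, with a nested group that is only set for Mexican Spanish. A quick sketch of how the three cases fall out (es_AR and es_CL are simply dropped, since Chrome folds them into es-419; es_MX is moved there):

    import re

    pattern = r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)'

    # Ordinary locale: no Latin American groups, the file is kept as-is.
    print re.search(pattern, '_locales/de/messages.json').groups()
    # -> (None, None, 'messages.json')

    # es_AR (or es_CL): is_latam is set, is_mexican is not, so files.pop() drops it.
    print re.search(pattern, '_locales/es_AR/messages.json').groups()
    # -> ('AR', None, 'messages.json')

    # es_MX: both groups are set, so the file is re-added under _locales/es_419/.
    print re.search(pattern, '_locales/es_MX/messages.json').groups()
    # -> ('MX', 'MX', 'messages.json')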


 def signBinary(zipdata, keyFile):
     from Crypto.Hash import SHA
     from Crypto.PublicKey import RSA
     from Crypto.Signature import PKCS1_v1_5

     try:
         with open(keyFile, 'rb') as file:
             key = RSA.importKey(file.read())
(...skipping 59 matching lines...)
     files.preprocess(
         [f for f, _ in metadata.items('preprocess')],
         {'needsExt': True}
     )

     if metadata.has_section('import_locales'):
         import_locales(params, files)

     files['manifest.json'] = createManifest(params, files)
     if type == 'chrome':
-        fixTranslationsForCWS(files)
+        fix_translations_for_chrome(files)

     if devenv:
         import buildtools
         import random
         files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js')
         files['devenvVersion__'] = str(random.random())

     if metadata.has_option('general', 'testScripts'):
         files['qunit/index.html'] = createScriptPage(
             params, 'testIndex.html.tmpl', ('general', 'testScripts')
         )

     zipdata = files.zipToString()
     signature = None
     pubkey = None
     if keyFile != None:
         signature = signBinary(zipdata, keyFile)
         pubkey = getPublicKey(keyFile)
     writePackage(outFile, pubkey, signature, zipdata)