
Delta Between Two Patch Sets: packagerChrome.py

Issue 29549786: Issue 5535 - Replace our module system with webpack (Closed)
Left Patch Set (lines marked '-' below): "Delete all temporary files at the same time" (created Sept. 22, 2017, 2 p.m.)
Right Patch Set (lines marked '+' below): "Addressed final nits" (created Oct. 10, 2017, 5:02 p.m.)
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

 import errno
+import glob
 import io
 import json
 import os
 import re
-import shutil
 from StringIO import StringIO
 import struct
 import subprocess
 import sys
-import tempfile

 from packager import (readMetadata, getDefaultFileName, getBuildVersion,
                       getTemplate, Files)

 defaultLocale = 'en_US'


 def getIgnoredFiles(params):
     return {'store.description'}

(... skipping 108 matching lines ...)
     # Normalize JSON structure
     licenseComment = re.compile(r'/\*.*?\*/', re.S)
     data = json.loads(re.sub(licenseComment, '', manifest, 1))
     if '_dummy' in data:
         del data['_dummy']
     manifest = json.dumps(data, sort_keys=True, indent=2)

     return manifest.encode('utf-8')


+def toJson(data):
+    return json.dumps(
+        data, ensure_ascii=False, sort_keys=True,
+        indent=2, separators=(',', ': ')
+    ).encode('utf-8') + '\n'
+
+
 def create_bundles(params, files):
     base_extension_path = params['baseDir']
     info_templates = {
         'chrome': 'chromeInfo.js.tmpl',
         'edge': 'edgeInfo.js.tmpl',
         'gecko-webext': 'geckoInfo.js.tmpl'
     }
-    info_module = None
-
-    # Once we use relative paths when requiring modules we can remove these,
-    # but in the mean time Webpack needs to know where to look.
-    resolve_paths = ' '.join(['./lib',
-                              './adblockpluscore/lib',
-                              './adblockplusui/lib'])

        [Sebastian Noack 2017/09/22 19:28:02] Mind filing a follow up issue (and referring to it [...]
        [Sebastian Noack 2017/09/22 22:21:12] Never mind, these issues have already been filed, [...]
        [kzar 2017/09/23 20:26:36] OK since we're now considering removing buildtools [...]
        [Sebastian Noack 2017/09/23 20:49:01] It seems you missed this part?
        [kzar 2017/09/24 09:47:53] Oh, I forgot to mention that I tried to file the i[...]

-    try:
-        temp_dir = tempfile.mkdtemp()

        [Sebastian Noack 2017/09/22 19:28:02] This should be outside of the try-finally block. O[...]
        [kzar 2017/09/23 20:26:35] Done.

-        template = getTemplate(info_templates[params['type']])
-        with tempfile.NamedTemporaryFile(delete=False,

        [Sebastian Noack 2017/09/22 19:28:02] Do we need to use NamedTemporaryFile here, or can'[...]
        [kzar 2017/09/23 20:26:35] Done. (But I wonder if the file name is more likel[...]

-                                         dir=temp_dir) as info_file:
-            info_file.write(
-                template.render(
-                    basename=params['metadata'].get('general', 'basename'),
-                    version=params['metadata'].get('general', 'version')
-                ).encode('utf-8')
-            )
-            info_module = info_file.name
-
-        for item in params['metadata'].items('bundles'):
-            name, value = item
-            base_item_path = os.path.dirname(item.source)
-
-            bundle_file = os.path.relpath(os.path.join(base_item_path, name),
-                                          base_extension_path)
-            entry_files = [
-                os.path.join(
-                    '.',
-                    os.path.relpath(os.path.join(base_item_path, module_path),
-                                    base_extension_path)
-                )
-                for module_path in value.split()
-            ]
-            subprocess.check_call(
-                ['npm', 'run-script', 'webpack', '--silent'],
-                cwd=os.path.dirname(__file__),
-                env={
-                    'EXTENSION_PATH': base_extension_path,
-                    'ENTRY_POINTS': ' '.join(entry_files),
-                    'OUTPUT_PATH': temp_dir,
-                    'BUNDLE_NAME': bundle_file,
-                    'RESOLVE_PATHS': resolve_paths,
-                    'INFO_PATH': info_module,
-                    'PATH': os.environ['PATH']
-                }
-            )
-            for file_name in [bundle_file, bundle_file + '.map']:
-                with open(os.path.join(temp_dir, file_name), 'r') as f:
-                    files[file_name] = f.read()
-    finally:
-        shutil.rmtree(temp_dir)
+
+    # Historically we didn't use relative paths when requiring modules, so in
+    # order for webpack to know where to find them we need to pass in a list of
+    # resolve paths. Going forward we should always use relative paths, once we
+    # do that consistently this can be removed. See issues 5760, 5761 and 5762.
+    resolve_paths = [os.path.join(base_extension_path, dir, 'lib')
+                     for dir in ['', 'adblockpluscore', 'adblockplusui']]
+
+    info_template = getTemplate(info_templates[params['type']])
+    info_module = info_template.render(
+        basename=params['metadata'].get('general', 'basename'),
+        version=params['metadata'].get('general', 'version')
+    ).encode('utf-8')
+
+    configuration = {
+        'bundles': [],
+        'extension_path': base_extension_path,
+        'info_module': info_module,
+        'resolve_paths': resolve_paths,
+    }
+
+    for item in params['metadata'].items('bundles'):
+        name, value = item
+        base_item_path = os.path.dirname(item.source)
+
+        bundle_file = os.path.relpath(os.path.join(base_item_path, name),
+                                      base_extension_path)
+        entry_files = [os.path.join(base_item_path, module_path)
+                       for module_path in value.split()]
+        configuration['bundles'].append({
+            'bundle_name': bundle_file,
+            'entry_points': entry_files,
+        })
+
+    cmd = ['node', os.path.join(os.path.dirname(__file__), 'webpack_runner.js')]
+    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                               stdin=subprocess.PIPE)
+    output = process.communicate(input=toJson(configuration))[0]
+    if process.returncode != 0:
+        raise subprocess.CalledProcessError(process.returncode, cmd=cmd)
+
+    bundles = json.loads(output)
+    for bundle in bundles:
+        files[bundle] = bundles[bundle].encode('utf-8')
-
-
-def toJson(data):
-    return json.dumps(
-        data, ensure_ascii=False, sort_keys=True,
-        indent=2, separators=(',', ': ')
-    ).encode('utf-8') + '\n'
-
-
-def import_string_webext(data, key, source):
-    """Import a single translation from the source dictionary into data"""
-    data[key] = source
-
-
-def import_string_gecko(data, key, value):
-    """Import Gecko-style locales into data.
-
-    Only sets {'message': value} in the data-dictionary, after stripping
-    undesired Gecko-style access keys.
-    """
-    match = re.search(r'^(.*?)\s*\(&.\)$', value)
-    if match:
-        value = match.group(1)
-    else:
-        index = value.find('&')
-        if index >= 0:
-            value = value[0:index] + value[index + 1:]
-
-    data[key] = {'message': value}
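
For orientation, here is a rough sketch (not part of the patch) of the data that the new create_bundles exchanges with webpack_runner.js over stdin/stdout. The metadata section and every concrete path below are invented for illustration; only the dictionary keys come from the code above.

    # Sketch only: the [bundles] entry and all paths here are hypothetical.
    # create_bundles() serializes a configuration like this with toJson() and
    # writes it to webpack_runner.js's stdin, e.g. for a metadata section
    #
    #   [bundles]
    #   lib/adblockplus.js = lib/adblockplus.js
    example_configuration = {
        'extension_path': '/src/adblockpluschrome',
        'info_module': '/* rendered chromeInfo.js.tmpl source */',
        'resolve_paths': [
            '/src/adblockpluschrome/lib',
            '/src/adblockpluschrome/adblockpluscore/lib',
            '/src/adblockpluschrome/adblockplusui/lib',
        ],
        'bundles': [
            {
                'bundle_name': 'lib/adblockplus.js',
                'entry_points': ['/src/adblockpluschrome/lib/adblockplus.js'],
            },
        ],
    }

    # webpack_runner.js is then expected to print a JSON object mapping output
    # file names to their contents, which create_bundles() copies into `files`,
    # e.g. {"lib/adblockplus.js": "...bundled source..."}.
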


 def import_locales(params, files):
-    import localeTools
-
-    # FIXME: localeTools doesn't use real Chrome locales, it uses dash as
-    # separator instead.
-    convert_locale_code = lambda code: code.replace('-', '_')
-
-    # We need to map Chrome locales to Gecko locales. Start by mapping Chrome
-    # locales to themselves, merely with the dash as separator.
-    locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales}
-
-    # Convert values to Crowdin locales first (use Chrome => Crowdin mapping).
-    for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems():
-        locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale
-
-    # Now convert values to Gecko locales (use Gecko => Crowdin mapping).
-    reverse_mapping = {v: k for k, v in locale_mapping.iteritems()}
-    for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems():
-        if crowdin_locale in reverse_mapping:
-            locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale
-
-    for target, source in locale_mapping.iteritems():
-        targetFile = '_locales/%s/messages.json' % target
-        if not targetFile in files:
-            continue
-
-        for item in params['metadata'].items('import_locales'):
-            fileName, keys = item
-            parts = map(lambda n: source if n == '*' else n, fileName.split('/'))
-            sourceFile = os.path.join(os.path.dirname(item.source), *parts)
-            incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete')
-            if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker):
-                continue
-
-            data = json.loads(files[targetFile].decode('utf-8'))
+    for item in params['metadata'].items('import_locales'):
+        filename, keys = item
+        for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source),
+                                                 *filename.split('/'))):
+            locale = sourceFile.split(os.path.sep)[-2]
+            targetFile = os.path.join('_locales', locale, 'messages.json')
+            data = json.loads(files.get(targetFile, '{}').decode('utf-8'))

             try:
-                # The WebExtensions (.json) and Gecko format provide
-                # translations differently and/or provide additional
-                # information like e.g. "placeholders". We want to adhere to
-                # that and preserve the addtional info.
-                if sourceFile.endswith('.json'):
-                    with io.open(sourceFile, 'r', encoding='utf-8') as handle:
-                        sourceData = json.load(handle)
-                    import_string = import_string_webext
-                else:
-                    sourceData = localeTools.readFile(sourceFile)
-                    import_string = import_string_gecko
+                with io.open(sourceFile, 'r', encoding='utf-8') as handle:
+                    sourceData = json.load(handle)

                 # Resolve wildcard imports
-                if keys == '*' or keys == '=*':
+                if keys == '*':
                     importList = sourceData.keys()
                     importList = filter(lambda k: not k.startswith('_'), importList)
-                    if keys == '=*':
-                        importList = map(lambda k: '=' + k, importList)
                     keys = ' '.join(importList)

                 for stringID in keys.split():
-                    noMangling = False
-                    if stringID.startswith('='):
-                        stringID = stringID[1:]
-                        noMangling = True
-
                     if stringID in sourceData:
-                        if noMangling:
-                            key = re.sub(r'\W', '_', stringID)
-                        else:
-                            key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID)
-                        if key in data:
-                            print 'Warning: locale string %s defined multiple times' % key
-
-                        import_string(data, key, sourceData[stringID])
+                        if stringID in data:
+                            print ('Warning: locale string {} defined multiple'
+                                   ' times').format(stringID)
+
+                        data[stringID] = sourceData[stringID]
             except Exception as e:
                 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e)

             files[targetFile] = toJson(data)


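
For orientation, a rough sketch (not part of the patch) of how the rewritten import_locales resolves translation files. The metadata entry and paths are hypothetical; the glob expansion and locale extraction mirror the code above.

    # Sketch only: the metadata entry and paths are hypothetical.
    # A metadata entry such as
    #
    #   [import_locales]
    #   adblockplusui/locale/*/desktop-options.json = *
    #
    # yields filename = 'adblockplusui/locale/*/desktop-options.json' and
    # keys = '*' (import every string that does not start with '_').
    import glob
    import os

    metadata_dir = '/src/adblockpluschrome'  # os.path.dirname(item.source)
    filename = 'adblockplusui/locale/*/desktop-options.json'

    for sourceFile in glob.glob(os.path.join(metadata_dir, *filename.split('/'))):
        # e.g. /src/adblockpluschrome/adblockplusui/locale/de/desktop-options.json
        locale = sourceFile.split(os.path.sep)[-2]  # -> 'de'
        targetFile = os.path.join('_locales', locale, 'messages.json')
        # the strings from sourceFile are then merged into files[targetFile]
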
 def truncate(text, length_limit):
     if len(text) <= length_limit:
         return text
     return text[:length_limit - 1].rstrip() + u'\u2026'


-def fixTranslationsForCWS(files):
-    # Chrome Web Store requires messages used in manifest.json to be present in
-    # all languages. It also enforces length limits for extension names and
-    # descriptions.
+def fix_translations_for_chrome(files):
     defaults = {}
     data = json.loads(files['_locales/%s/messages.json' % defaultLocale])
     for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']):
         name = match.group(1)
         defaults[name] = data[name]

     limits = {}
     manifest = json.loads(files['manifest.json'])
     for key, limit in (('name', 45), ('description', 132), ('short_name', 12)):
         match = re.search(r'__MSG_(\S+)__', manifest.get(key, ''))
         if match:
             limits[match.group(1)] = limit

-    for filename in files:
-        if not filename.startswith('_locales/') or not filename.endswith('/messages.json'):
+    for path in list(files):
+        match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path)
+        if not match:
             continue

-        data = json.loads(files[filename])
-        for name, info in defaults.iteritems():
-            data.setdefault(name, info)
-        for name, limit in limits.iteritems():
-            if name in data:
-                data[name]['message'] = truncate(data[name]['message'], limit)
-        files[filename] = toJson(data)
+        # The Chrome Web Store requires messages used in manifest.json to
+        # be present in all languages, and enforces length limits on
+        # extension name and description.
+        is_latam, is_mexican, filename = match.groups()
+        if filename == 'messages.json':
+            data = json.loads(files[path])
+            for name, info in defaults.iteritems():
+                data.setdefault(name, info)
+            for name, limit in limits.iteritems():
+                info = data.get(name)
+                if info:
+                    info['message'] = truncate(info['message'], limit)
+            files[path] = toJson(data)
+
+        # Chrome combines Latin American dialects of Spanish into es-419.
+        if is_latam:
+            data = files.pop(path)
+            if is_mexican:
+                files['_locales/es_419/' + filename] = data


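A small demonstration (not part of the patch) of the locale regex used by fix_translations_for_chrome above; the match groups drive both the message fix-up and the es-419 handling.

    # Sketch only: demonstrates the behaviour of the regex used above.
    import re

    pattern = r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)'
    for path in ['_locales/de/messages.json',
                 '_locales/es_AR/messages.json',
                 '_locales/es_MX/messages.json']:
        is_latam, is_mexican, filename = re.search(pattern, path).groups()
        print path, is_latam, is_mexican, filename

    # _locales/de/messages.json    -> (None, None, 'messages.json'):
    #     only the default messages and length limits are applied.
    # _locales/es_AR/messages.json -> ('AR', None, 'messages.json'):
    #     the file is popped and dropped.
    # _locales/es_MX/messages.json -> ('MX', 'MX', 'messages.json'):
    #     the file is popped and re-added as _locales/es_419/messages.json.
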
 def signBinary(zipdata, keyFile):
     from Crypto.Hash import SHA
     from Crypto.PublicKey import RSA
     from Crypto.Signature import PKCS1_v1_5

     try:
         with open(keyFile, 'rb') as file:
             key = RSA.importKey(file.read())

(... skipping 59 matching lines ...)
     files.preprocess(
         [f for f, _ in metadata.items('preprocess')],
         {'needsExt': True}
     )

     if metadata.has_section('import_locales'):
         import_locales(params, files)

     files['manifest.json'] = createManifest(params, files)
     if type == 'chrome':
-        fixTranslationsForCWS(files)
+        fix_translations_for_chrome(files)

     if devenv:
         import buildtools
         import random
         files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js')
         files['devenvVersion__'] = str(random.random())

     if metadata.has_option('general', 'testScripts'):
         files['qunit/index.html'] = createScriptPage(
             params, 'testIndex.html.tmpl', ('general', 'testScripts')
         )

     zipdata = files.zipToString()
     signature = None
     pubkey = None
     if keyFile != None:
         signature = signBinary(zipdata, keyFile)
         pubkey = getPublicKey(keyFile)
     writePackage(outFile, pubkey, signature, zipdata)
