OLD | NEW |
---|---|
1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
4 | 4 |
5 import errno | 5 import errno |
6 import io | 6 import io |
7 import json | 7 import json |
8 import os | 8 import os |
9 import re | 9 import re |
10 from StringIO import StringIO | 10 from StringIO import StringIO |
11 import struct | 11 import struct |
12 import sys | 12 import sys |
13 import collections | 13 import collections |
| 14 import glob |
14 | 15 |
15 from packager import (readMetadata, getDefaultFileName, getBuildVersion, | 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion, |
16 getTemplate, Files) | 17 getTemplate, Files) |
17 | 18 |
18 defaultLocale = 'en_US' | 19 defaultLocale = 'en_US' |
19 | 20 |
20 | 21 |
21 def getIgnoredFiles(params): | 22 def getIgnoredFiles(params): |
22 return {'store.description'} | 23 return {'store.description'} |
23 | 24 |
(...skipping 182 matching lines...) | |
206 value = match.group(1) | 207 value = match.group(1) |
207 else: | 208 else: |
208 index = value.find('&') | 209 index = value.find('&') |
209 if index >= 0: | 210 if index >= 0: |
210 value = value[0:index] + value[index + 1:] | 211 value = value[0:index] + value[index + 1:] |
211 | 212 |
212 data[key] = {'message': value} | 213 data[key] = {'message': value} |
213 | 214 |
214 | 215 |
215 def import_locales(params, files): | 216 def import_locales(params, files): |
216 import localeTools | 217 for item in params['metadata'].items('import_locales'): |
217 | 218 filename, keys = item |
218 # FIXME: localeTools doesn't use real Chrome locales, it uses dash as | 219 for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source), |
219 # separator instead. | 220 *filename.split('/'))): |
220 convert_locale_code = lambda code: code.replace('-', '_') | 221 parts = sourceFile.split(os.path.sep) |
Sebastian Noack, 2017/10/05 20:58:38: I just noticed that the variable "parts" is used b
tlucas, 2017/10/06 08:53:53: Acknowledged.
| |
221 | 222 locale = parts[-2].replace('-', '_') |
222 # We need to map Chrome locales to Gecko locales. Start by mapping Chrome | 223 targetFile = os.path.join('_locales', locale, 'messages.json') |
223 # locales to themselves, merely with the dash as separator. | 224 data = json.loads(files.get(targetFile, '{}').decode('utf-8')) |
224 locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales} | |
225 | |
226 # Convert values to Crowdin locales first (use Chrome => Crowdin mapping). | |
227 for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems(): | |
228 locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale | |
229 | |
230 # Now convert values to Gecko locales (use Gecko => Crowdin mapping). | |
231 reverse_mapping = {v: k for k, v in locale_mapping.iteritems()} | |
232 for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems(): | |
233 if crowdin_locale in reverse_mapping: | |
234 locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale | |
235 | |
236 for target, source in locale_mapping.iteritems(): | |
237 targetFile = '_locales/%s/messages.json' % target | |
238 if not targetFile in files: | |
239 continue | |
240 | |
241 for item in params['metadata'].items('import_locales'): | |
242 fileName, keys = item | |
243 parts = map(lambda n: source if n == '*' else n, fileName.split('/')) | |
244 sourceFile = os.path.join(os.path.dirname(item.source), *parts) | |
245 incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete') | |
246 if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker): | |
247 continue | |
248 | |
249 data = json.loads(files[targetFile].decode('utf-8')) | |
250 | 225 |
251 try: | 226 try: |
252 # The WebExtensions (.json) and Gecko format provide | 227 # The WebExtensions (.json) and Gecko format provide |
253 # translations differently and/or provide additional | 228 # translations differently and/or provide additional |
254 # information like e.g. "placeholders". We want to adhere to | 229 # information like e.g. "placeholders". We want to adhere to |
255 # that and preserve the additional info. | 230 # that and preserve the additional info. |
256 if sourceFile.endswith('.json'): | 231 if sourceFile.endswith('.json'): |
257 with io.open(sourceFile, 'r', encoding='utf-8') as handle: | 232 with io.open(sourceFile, 'r', encoding='utf-8') as handle: |
258 sourceData = json.load(handle) | 233 sourceData = json.load(handle) |
259 import_string = import_string_webext | 234 import_string = import_string_webext |
260 else: | 235 else: |
| 236 import localeTools |
261 sourceData = localeTools.readFile(sourceFile) | 237 sourceData = localeTools.readFile(sourceFile) |
262 import_string = import_string_gecko | 238 import_string = import_string_gecko |
263 | 239 |
264 # Resolve wildcard imports | 240 # Resolve wildcard imports |
265 if keys == '*' or keys == '=*': | 241 if keys == '*' or keys == '=*': |
266 importList = sourceData.keys() | 242 importList = sourceData.keys() |
267 importList = filter(lambda k: not k.startswith('_'), importList) | 243 importList = filter(lambda k: not k.startswith('_'), importList) |
268 if keys == '=*': | 244 if keys == '=*': |
269 importList = map(lambda k: '=' + k, importList) | 245 importList = map(lambda k: '=' + k, importList) |
270 keys = ' '.join(importList) | 246 keys = ' '.join(importList) |
(...skipping 18 matching lines...) | |
289 | 265 |
290 files[targetFile] = toJson(data) | 266 files[targetFile] = toJson(data) |
291 | 267 |
292 | 268 |
293 def truncate(text, length_limit): | 269 def truncate(text, length_limit): |
294 if len(text) <= length_limit: | 270 if len(text) <= length_limit: |
295 return text | 271 return text |
296 return text[:length_limit - 1].rstrip() + u'\u2026' | 272 return text[:length_limit - 1].rstrip() + u'\u2026' |
297 | 273 |
298 | 274 |
299 def fixTranslationsForCWS(files): | 275 def fix_translations_for_chrome(files): |
300 # Chrome Web Store requires messages used in manifest.json to be present in | |
301 # all languages. It also enforces length limits for extension names and | |
302 # descriptions. | |
303 defaults = {} | 276 defaults = {} |
304 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) | 277 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) |
305 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): | 278 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): |
306 name = match.group(1) | 279 name = match.group(1) |
307 defaults[name] = data[name] | 280 defaults[name] = data[name] |
308 | 281 |
309 limits = {} | 282 limits = {} |
310 manifest = json.loads(files['manifest.json']) | 283 manifest = json.loads(files['manifest.json']) |
311 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): | 284 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): |
312 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) | 285 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) |
313 if match: | 286 if match: |
314 limits[match.group(1)] = limit | 287 limits[match.group(1)] = limit |
315 | 288 |
316 for filename in files: | 289 for path in list(files): |
317 if not filename.startswith('_locales/') or not filename.endswith('/messages.json'): | 290 match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path) |
| 291 if not match: |
318 continue | 292 continue |
319 | 293 |
320 data = json.loads(files[filename]) | 294 # The Chrome Web Store requires messages used in manifest.json to |
321 for name, info in defaults.iteritems(): | 295 # be present in all languages, and enforces length limits on |
322 data.setdefault(name, info) | 296 # extension name and description. |
323 for name, limit in limits.iteritems(): | 297 is_latam, is_mexican, filename = match.groups() |
324 if name in data: | 298 if filename == 'messages.json': |
325 data[name]['message'] = truncate(data[name]['message'], limit) | 299 data = json.loads(files[path]) |
326 files[filename] = toJson(data) | 300 for name, info in defaults.iteritems(): |
| 301 data.setdefault(name, info) |
| 302 for name, limit in limits.iteritems(): |
| 303 info = data.get(name) |
| 304 if info: |
| 305 info['message'] = truncate(info['message'], limit) |
| 306 files[path] = toJson(data) |
| 307 |
| 308 # Chrome combines Latin American dialects of Spanish into es-419. |
| 309 if is_latam: |
| 310 data = files.pop(path) |
| 311 if is_mexican: |
| 312 files['_locales/es_419/' + filename] = data |
327 | 313 |
328 | 314 |
329 def signBinary(zipdata, keyFile): | 315 def signBinary(zipdata, keyFile): |
330 from Crypto.Hash import SHA | 316 from Crypto.Hash import SHA |
331 from Crypto.PublicKey import RSA | 317 from Crypto.PublicKey import RSA |
332 from Crypto.Signature import PKCS1_v1_5 | 318 from Crypto.Signature import PKCS1_v1_5 |
333 | 319 |
334 try: | 320 try: |
335 with open(keyFile, 'rb') as file: | 321 with open(keyFile, 'rb') as file: |
336 key = RSA.importKey(file.read()) | 322 key = RSA.importKey(file.read()) |
(...skipping 59 matching lines...) | |
396 files.preprocess( | 382 files.preprocess( |
397 [f for f, _ in metadata.items('preprocess')], | 383 [f for f, _ in metadata.items('preprocess')], |
398 {'needsExt': True} | 384 {'needsExt': True} |
399 ) | 385 ) |
400 | 386 |
401 if metadata.has_section('import_locales'): | 387 if metadata.has_section('import_locales'): |
402 import_locales(params, files) | 388 import_locales(params, files) |
403 | 389 |
404 files['manifest.json'] = createManifest(params, files) | 390 files['manifest.json'] = createManifest(params, files) |
405 if type == 'chrome': | 391 if type == 'chrome': |
406 fixTranslationsForCWS(files) | 392 fix_translations_for_chrome(files) |
407 | 393 |
408 if devenv: | 394 if devenv: |
409 import buildtools | 395 import buildtools |
410 import random | 396 import random |
411 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js') | 397 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js') |
412 files['devenvVersion__'] = str(random.random()) | 398 files['devenvVersion__'] = str(random.random()) |
413 | 399 |
414 if metadata.has_option('general', 'testScripts'): | 400 if metadata.has_option('general', 'testScripts'): |
415 files['qunit/index.html'] = createScriptPage( | 401 files['qunit/index.html'] = createScriptPage( |
416 params, 'testIndex.html.tmpl', ('general', 'testScripts') | 402 params, 'testIndex.html.tmpl', ('general', 'testScripts') |
417 ) | 403 ) |
418 | 404 |
419 zipdata = files.zipToString() | 405 zipdata = files.zipToString() |
420 signature = None | 406 signature = None |
421 pubkey = None | 407 pubkey = None |
422 if keyFile != None: | 408 if keyFile != None: |
423 signature = signBinary(zipdata, keyFile) | 409 signature = signBinary(zipdata, keyFile) |
424 pubkey = getPublicKey(keyFile) | 410 pubkey = getPublicKey(keyFile) |
425 writePackage(outFile, pubkey, signature, zipdata) | 411 writePackage(outFile, pubkey, signature, zipdata) |
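
Reviewer aid, not part of the patch: a minimal sketch of how the locale-path regex introduced in fix_translations_for_chrome (new line 290) groups its matches, assuming the behaviour shown in the new code above; the sample paths are illustrative only.

import re

# Same pattern as in the new code: group 1 marks Latin American Spanish,
# group 2 marks Mexican Spanish specifically, group 3 is the file path
# inside the locale directory.
locale_re = re.compile(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)')

# es_MX fills both the Latin American and the Mexican group, so its files
# get moved to _locales/es_419/.
assert locale_re.match('_locales/es_MX/messages.json').groups() == ('MX', 'MX', 'messages.json')

# es_AR (and es_CL) fill only the Latin American group, so their files are
# popped from the package and dropped.
assert locale_re.match('_locales/es_AR/messages.json').groups() == ('AR', None, 'messages.json')

# Any other locale leaves both groups empty and is processed normally.
assert locale_re.match('_locales/de/messages.json').groups() == (None, None, 'messages.json')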