LEFT | RIGHT |
1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
4 | 4 |
5 import errno | 5 import errno |
6 import io | 6 import io |
7 import json | 7 import json |
8 import os | 8 import os |
9 import re | 9 import re |
10 from StringIO import StringIO | 10 from StringIO import StringIO |
11 import struct | 11 import struct |
12 import sys | 12 import sys |
13 import collections | 13 import collections |
| 14 import glob |
14 | 15 |
15 from packager import (readMetadata, getDefaultFileName, getBuildVersion, | 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion, |
16 getTemplate, Files) | 17 getTemplate, Files) |
17 | 18 |
18 defaultLocale = 'en_US' | 19 defaultLocale = 'en_US' |
19 | 20 |
20 | 21 |
21 def getIgnoredFiles(params): | 22 def getIgnoredFiles(params): |
22 return {'store.description'} | 23 return {'store.description'} |
23 | 24 |
(...skipping 160 matching lines...)
184 | 185 |
185 | 186 |
186 def toJson(data): | 187 def toJson(data): |
187 return json.dumps( | 188 return json.dumps( |
188 data, ensure_ascii=False, sort_keys=True, | 189 data, ensure_ascii=False, sort_keys=True, |
189 indent=2, separators=(',', ': ') | 190 indent=2, separators=(',', ': ') |
190 ).encode('utf-8') + '\n' | 191 ).encode('utf-8') + '\n' |
191 | 192 |
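Note: toJson serializes with sorted keys, two-space indentation and no ASCII escaping, then encodes to UTF-8 and appends a newline. A minimal illustration of the output shape (the 'greeting' entry is made up, not from the repo):

    # Hypothetical locale entry, shown only to illustrate toJson's output.
    print toJson({'greeting': {'message': u'\xfcber'}})
    # {
    #   "greeting": {
    #     "message": "über"
    #   }
    # }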
192 | 193 |
193 def import_locales(params, files): | 194 def import_locales(params, files): |
194 import localeTools | 195 for item in params['metadata'].items('import_locales'): |
195 | 196 filename, keys = item |
196 # FIXME: localeTools doesn't use real Chrome locales, it uses dash as | 197 for sourceFile in glob.glob(os.path.join(os.path.dirname(item.source), |
197 # separator instead. | 198 *filename.split('/'))): |
198 convert_locale_code = lambda code: code.replace('-', '_') | 199 locale = sourceFile.split(os.path.sep)[-2] |
199 | 200 targetFile = os.path.join('_locales', locale, 'messages.json') |
200 # We need to map Chrome locales to Gecko locales. Start by mapping Chrome | 201 data = json.loads(files.get(targetFile, '{}').decode('utf-8')) |
201 # locales to themselves, merely with the dash as separator. | |
202 locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales} | |
203 | |
204 # Convert values to Crowdin locales first (use Chrome => Crowdin mapping). | |
205 for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems(): | |
206 locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale | |
207 | |
208 # Now convert values to Gecko locales (use Gecko => Crowdin mapping). | |
209 reverse_mapping = {v: k for k, v in locale_mapping.iteritems()} | |
210 for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems(): | |
211 if crowdin_locale in reverse_mapping: | |
212 locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale | |
213 | |
214 for target, source in locale_mapping.iteritems(): | |
215 targetFile = '_locales/%s/messages.json' % target | |
216 if not targetFile in files: | |
217 continue | |
218 | |
219 for item in params['metadata'].items('import_locales'): | |
220 fileName, keys = item | |
221 parts = map(lambda n: source if n == '*' else n, fileName.split('/')) | |
222 sourceFile = os.path.join(os.path.dirname(item.source), *parts) | |
223 incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete') | |
224 if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker): | |
225 continue | |
226 | |
227 data = json.loads(files[targetFile].decode('utf-8')) | |
228 | 202 |
229 try: | 203 try: |
230 with io.open(sourceFile, 'r', encoding='utf-8') as handle: | 204 with io.open(sourceFile, 'r', encoding='utf-8') as handle: |
231 sourceData = json.load(handle) | 205 sourceData = json.load(handle) |
232 | 206 |
233 # Resolve wildcard imports | 207 # Resolve wildcard imports |
234 if keys == '*' or keys == '=*': | 208 if keys == '*': |
235 importList = sourceData.keys() | 209 importList = sourceData.keys() |
236 importList = filter(lambda k: not k.startswith('_'), importList) | 210 importList = filter(lambda k: not k.startswith('_'), importList) |
237 if keys == '=*': | |
238 importList = map(lambda k: '=' + k, importList) | |
239 keys = ' '.join(importList) | 211 keys = ' '.join(importList) |
240 | 212 |
241 for stringID in keys.split(): | 213 for stringID in keys.split(): |
242 noMangling = False | |
243 if stringID.startswith('='): | |
244 stringID = stringID[1:] | |
245 noMangling = True | |
246 | |
247 if stringID in sourceData: | 214 if stringID in sourceData: |
248 if noMangling: | 215 if stringID in data: |
249 key = re.sub(r'\W', '_', stringID) | 216 print ('Warning: locale string {} defined multiple' |
250 else: | 217 ' times').format(stringID) |
251 key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID) | 218 |
252 if key in data: | 219 data[stringID] = sourceData[stringID] |
253 print 'Warning: locale string %s defined multiple times' % key | |
254 | |
255 data[key] = sourceData[stringID] | |
256 except Exception as e: | 220 except Exception as e: |
257 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e) | 221 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e) |
258 | 222 |
259 files[targetFile] = toJson(data) | 223 files[targetFile] = toJson(data) |
260 | 224 |
261 | 225 |
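Note on the rewritten import_locales (right side): it globs the configured filename pattern and derives the locale from the second-to-last path component, so source files are expected to sit in per-locale directories. A rough sketch of that mapping, using a made-up path:

    import os
    # Hypothetical path as glob.glob() might return it; not from the repo.
    sourceFile = os.path.join('locales-repo', 'locale', 'de', 'strings.json')
    locale = sourceFile.split(os.path.sep)[-2]                  # 'de'
    targetFile = os.path.join('_locales', locale, 'messages.json')
    # On POSIX this yields '_locales/de/messages.json'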
262 def truncate(text, length_limit): | 226 def truncate(text, length_limit): |
263 if len(text) <= length_limit: | 227 if len(text) <= length_limit: |
264 return text | 228 return text |
265 return text[:length_limit - 1].rstrip() + u'\u2026' | 229 return text[:length_limit - 1].rstrip() + u'\u2026' |
266 | 230 |
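truncate keeps strings within the Web Store limits by cutting to length_limit - 1, stripping trailing whitespace and appending an ellipsis, e.g. (sample string is illustrative):

    truncate(u'Adblocking browser extension', 12)  # -> u'Adblocking\u2026'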
267 | 231 |
268 def fixTranslationsForChrome(files): | 232 def fix_translations_for_chrome(files): |
269 defaults = {} | 233 defaults = {} |
270 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) | 234 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) |
271 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): | 235 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): |
272 name = match.group(1) | 236 name = match.group(1) |
273 defaults[name] = data[name] | 237 defaults[name] = data[name] |
274 | 238 |
275 limits = {} | 239 limits = {} |
276 manifest = json.loads(files['manifest.json']) | 240 manifest = json.loads(files['manifest.json']) |
277 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): | 241 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): |
278 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) | 242 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) |
279 if match: | 243 if match: |
280 limits[match.group(1)] = limit | 244 limits[match.group(1)] = limit |
281 | 245 |
282 for path in list(files): | 246 for path in list(files): |
283 match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path) | 247 match = re.search(r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path) |
284 if match: | 248 if not match: |
285 isLatAm, isMexican, filename = match.groups() | 249 continue |
286 | 250 |
287 # The Chrome Web Store requires messages used in manifest.json to be | 251 # The Chrome Web Store requires messages used in manifest.json to |
288 # present in all languages, and enforces length limits on extension | 252 # be present in all languages, and enforces length limits on |
289 # name and description. | 253 # extension name and description. |
290 if filename == 'messages.json': | 254 is_latam, is_mexican, filename = match.groups() |
291 data = json.loads(files[path]) | 255 if filename == 'messages.json': |
292 for name, info in defaults.iteritems(): | 256 data = json.loads(files[path]) |
293 data.setdefault(name, info) | 257 for name, info in defaults.iteritems(): |
294 for name, limit in limits.iteritems(): | 258 data.setdefault(name, info) |
295 info = data.get(name) | 259 for name, limit in limits.iteritems(): |
296 if info: | 260 info = data.get(name) |
297 info['message'] = truncate(info['message'], limit) | 261 if info: |
298 files[path] = toJson(data) | 262 info['message'] = truncate(info['message'], limit) |
299 | 263 files[path] = toJson(data) |
300 # Chrome combines Latin American dialects of Spanish into es-419. | 264 |
301 if isLatAm: | 265 # Chrome combines Latin American dialects of Spanish into es-419. |
302 data = files.pop(path) | 266 if is_latam: |
303 if isMexican: | 267 data = files.pop(path) |
304 files['_locales/es_419/' + filename] = data | 268 if is_mexican: |
| 269 files['_locales/es_419/' + filename] = data |
305 | 270 |
306 | 271 |
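The es_(AR|CL|(MX)) branch of the regex drives the es-419 merge: Argentinian and Chilean files are popped and discarded, while Mexican files are re-added under _locales/es_419. A hedged sketch of how the match groups map to that decision (paths are illustrative):

    import re
    for path in ['_locales/es_AR/messages.json',
                 '_locales/es_MX/messages.json',
                 '_locales/de/messages.json']:
        is_latam, is_mexican, filename = re.search(
            r'^_locales/(?:es_(AR|CL|(MX))|[^/]+)/(.*)', path).groups()
        # es_AR -> ('AR', None, ...)  popped and dropped
        # es_MX -> ('MX', 'MX', ...)  popped, re-added as _locales/es_419/...
        # de    -> (None, None, ...)  left untouched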
307 def signBinary(zipdata, keyFile): | 272 def signBinary(zipdata, keyFile): |
308 from Crypto.Hash import SHA | 273 from Crypto.Hash import SHA |
309 from Crypto.PublicKey import RSA | 274 from Crypto.PublicKey import RSA |
310 from Crypto.Signature import PKCS1_v1_5 | 275 from Crypto.Signature import PKCS1_v1_5 |
311 | 276 |
312 try: | 277 try: |
313 with open(keyFile, 'rb') as file: | 278 with open(keyFile, 'rb') as file: |
314 key = RSA.importKey(file.read()) | 279 key = RSA.importKey(file.read()) |
(...skipping 59 matching lines...)
374 files.preprocess( | 339 files.preprocess( |
375 [f for f, _ in metadata.items('preprocess')], | 340 [f for f, _ in metadata.items('preprocess')], |
376 {'needsExt': True} | 341 {'needsExt': True} |
377 ) | 342 ) |
378 | 343 |
379 if metadata.has_section('import_locales'): | 344 if metadata.has_section('import_locales'): |
380 import_locales(params, files) | 345 import_locales(params, files) |
381 | 346 |
382 files['manifest.json'] = createManifest(params, files) | 347 files['manifest.json'] = createManifest(params, files) |
383 if type == 'chrome': | 348 if type == 'chrome': |
384 fixTranslationsForChrome(files) | 349 fix_translations_for_chrome(files) |
385 | 350 |
386 if devenv: | 351 if devenv: |
387 import buildtools | 352 import buildtools |
388 import random | 353 import random |
389 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js') | 354 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js') |
390 files['devenvVersion__'] = str(random.random()) | 355 files['devenvVersion__'] = str(random.random()) |
391 | 356 |
392 if metadata.has_option('general', 'testScripts'): | 357 if metadata.has_option('general', 'testScripts'): |
393 files['qunit/index.html'] = createScriptPage( | 358 files['qunit/index.html'] = createScriptPage( |
394 params, 'testIndex.html.tmpl', ('general', 'testScripts') | 359 params, 'testIndex.html.tmpl', ('general', 'testScripts') |
395 ) | 360 ) |
396 | 361 |
397 zipdata = files.zipToString() | 362 zipdata = files.zipToString() |
398 signature = None | 363 signature = None |
399 pubkey = None | 364 pubkey = None |
400 if keyFile != None: | 365 if keyFile != None: |
401 signature = signBinary(zipdata, keyFile) | 366 signature = signBinary(zipdata, keyFile) |
402 pubkey = getPublicKey(keyFile) | 367 pubkey = getPublicKey(keyFile) |
403 writePackage(outFile, pubkey, signature, zipdata) | 368 writePackage(outFile, pubkey, signature, zipdata) |
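For context on the final writePackage call (its body is outside this diff): signBinary returns a PKCS#1 v1.5 signature over the zip, and a CRX2-style container would prepend the magic, format version and key/signature lengths before the payload. The sketch below is an assumption about that layout, not code from this change:

    import struct

    def write_package_sketch(out, pubkey, signature, zipdata):
        # Assumed CRX2 header: 'Cr24' magic, version 2, then the lengths of
        # the DER-encoded public key and of the signature (little-endian).
        if pubkey is not None and signature is not None:
            out.write(struct.pack('<4sIII', 'Cr24', 2,
                                  len(pubkey), len(signature)))
            out.write(pubkey)
            out.write(signature)
        out.write(zipdata)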