OLD | NEW
1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
4 | 4 |
5 import errno | 5 import errno |
6 import io | 6 import io |
7 import json | 7 import json |
8 import os | 8 import os |
9 import re | 9 import re |
10 from StringIO import StringIO | 10 from StringIO import StringIO |
(...skipping 172 matching lines...)
183 ).encode('utf-8') | 183 ).encode('utf-8') |
184 | 184 |
185 | 185 |
186 def toJson(data): | 186 def toJson(data): |
187 return json.dumps( | 187 return json.dumps( |
188 data, ensure_ascii=False, sort_keys=True, | 188 data, ensure_ascii=False, sort_keys=True, |
189 indent=2, separators=(',', ': ') | 189 indent=2, separators=(',', ': ') |
190 ).encode('utf-8') + '\n' | 190 ).encode('utf-8') + '\n' |
191 | 191 |
192 | 192 |
193 def import_string_webext(data, key, source): | |
194 """Import a single translation from the source dictionary into data""" | |
195 data[key] = source | |
196 | |
197 | |
198 def import_string_gecko(data, key, value): | |
199 """Import Gecko-style locales into data. | |
200 | |
201 Only sets {'message': value} in the data-dictionary, after stripping | |
202 undesired Gecko-style access keys. | |
203 """ | |
204 match = re.search(r'^(.*?)\s*\(&.\)$', value) | |
205 if match: | |
206 value = match.group(1) | |
207 else: | |
208 index = value.find('&') | |
209 if index >= 0: | |
210 value = value[0:index] + value[index + 1:] | |
211 | |
212 data[key] = {'message': value} | |
213 | |
214 | |
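For reference, the removed import_string_gecko helper stripped Gecko-style access keys before storing a translation. A minimal standalone sketch of that behavior (not part of the patch; the sample strings are illustrative):

    import re

    def strip_access_key(value):
        # Parenthesized access keys like 'Save (&S)' are cut off entirely;
        # otherwise the first embedded '&' is simply dropped ('&Save' -> 'Save').
        match = re.search(r'^(.*?)\s*\(&.\)$', value)
        if match:
            return match.group(1)
        index = value.find('&')
        if index >= 0:
            return value[:index] + value[index + 1:]
        return value

    print(strip_access_key('Save (&S)'))  # 'Save'
    print(strip_access_key('&Save'))      # 'Save'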
215 def import_locales(params, files): | 193 def import_locales(params, files): |
216 import localeTools | 194 import localeTools |
217 | 195 |
218 # FIXME: localeTools doesn't use real Chrome locales, it uses dash as | 196 # FIXME: localeTools doesn't use real Chrome locales, it uses dash as |
219 # separator instead. | 197 # separator instead. |
220 convert_locale_code = lambda code: code.replace('-', '_') | 198 convert_locale_code = lambda code: code.replace('-', '_') |
221 | 199 |
222 # We need to map Chrome locales to Gecko locales. Start by mapping Chrome | 200 # We need to map Chrome locales to Gecko locales. Start by mapping Chrome |
223 # locales to themselves, merely with the dash as separator. | 201 # locales to themselves, merely with the dash as separator. |
224 locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales} | 202 locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales}
225 | 203 |
226 # Convert values to Crowdin locales first (use Chrome => Crowdin mapping). | 204 # Convert values to Crowdin locales first (use Chrome => Crowdin mapping). |
227 for chrome_locale, crowdin_locale in localeTools.CROWDIN_LANG_MAPPING.iteritems(): | 205 for chrome_locale, crowdin_locale in localeTools.CROWDIN_LANG_MAPPING.iteritems():
tlucas 2017/10/04 11:48:40 Note: result of rebasing
228 locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale | 206 locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale |
229 | 207 |
230 # Now convert values to Gecko locales (use Gecko => Crowdin mapping). | 208 # Now convert values to Gecko locales (use Gecko => Crowdin mapping). |
231 reverse_mapping = {v: k for k, v in locale_mapping.iteritems()} | 209 reverse_mapping = {v: k for k, v in locale_mapping.iteritems()} |
232 for gecko_locale, crowdin_locale in localeTools.CROWDIN_LANG_MAPPING.iteritems(): | 210 for gecko_locale, crowdin_locale in localeTools.CROWDIN_LANG_MAPPING.iteritems():
tlucas 2017/10/04 11:48:40 Note: result of rebasing
233 if crowdin_locale in reverse_mapping: | 211 if crowdin_locale in reverse_mapping: |
234 locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale | 212 locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale |
235 | 213 |
236 for target, source in locale_mapping.iteritems(): | 214 for target, source in locale_mapping.iteritems(): |
237 targetFile = '_locales/%s/messages.json' % target | 215 targetFile = '_locales/%s/messages.json' % target |
238 if not targetFile in files: | 216 if not targetFile in files: |
239 continue | 217 continue |
240 | 218 |
241 for item in params['metadata'].items('import_locales'): | 219 for item in params['metadata'].items('import_locales'): |
242 fileName, keys = item | 220 fileName, keys = item |
243 parts = map(lambda n: source if n == '*' else n, fileName.split('/')) | 221 parts = map(lambda n: source if n == '*' else n, fileName.split('/'))
244 sourceFile = os.path.join(os.path.dirname(item.source), *parts) | 222 sourceFile = os.path.join(os.path.dirname(item.source), *parts) |
245 incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete') | 223 incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete')
246 if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker): | 224 if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker):
247 continue | 225 continue |
248 | 226 |
249 data = json.loads(files[targetFile].decode('utf-8')) | 227 data = json.loads(files[targetFile].decode('utf-8')) |
250 | 228 |
251 try: | 229 try: |
252 # The WebExtensions (.json) and Gecko format provide | 230 with io.open(sourceFile, 'r', encoding='utf-8') as handle: |
253 # translations differently and/or provide additional | 231 sourceData = json.load(handle) |
254 # information like e.g. "placeholders". We want to adhere to | |
255 # that and preserve the addtional info. | |
256 if sourceFile.endswith('.json'): | |
257 with io.open(sourceFile, 'r', encoding='utf-8') as handle: | |
258 sourceData = json.load(handle) | |
259 import_string = import_string_webext | |
260 else: | |
261 sourceData = localeTools.readFile(sourceFile) | |
262 import_string = import_string_gecko | |
263 | 232 |
264 # Resolve wildcard imports | 233 # Resolve wildcard imports |
265 if keys == '*' or keys == '=*': | 234 if keys == '*' or keys == '=*': |
266 importList = sourceData.keys() | 235 importList = sourceData.keys() |
267 importList = filter(lambda k: not k.startswith('_'), importList) | 236 importList = filter(lambda k: not k.startswith('_'), importList)
268 if keys == '=*': | 237 if keys == '=*': |
269 importList = map(lambda k: '=' + k, importList) | 238 importList = map(lambda k: '=' + k, importList) |
270 keys = ' '.join(importList) | 239 keys = ' '.join(importList) |
271 | 240 |
272 for stringID in keys.split(): | 241 for stringID in keys.split(): |
273 noMangling = False | 242 noMangling = False |
274 if stringID.startswith('='): | 243 if stringID.startswith('='): |
275 stringID = stringID[1:] | 244 stringID = stringID[1:] |
276 noMangling = True | 245 noMangling = True |
277 | 246 |
278 if stringID in sourceData: | 247 if stringID in sourceData: |
279 if noMangling: | 248 if noMangling: |
280 key = re.sub(r'\W', '_', stringID) | 249 key = re.sub(r'\W', '_', stringID) |
281 else: | 250 else: |
282 key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID) | 251 key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID)
283 if key in data: | 252 if key in data: |
284 print 'Warning: locale string %s defined multiple times' % key | 253 print 'Warning: locale string %s defined multiple times' % key
285 | 254 |
286 import_string(data, key, sourceData[stringID]) | 255 data[key] = sourceData[stringID] |
287 except Exception as e: | 256 except Exception as e: |
288 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e) | 257 print 'Warning: error importing locale data from %s: %s' % (sourceFile, e)
289 | 258 |
290 files[targetFile] = toJson(data) | 259 files[targetFile] = toJson(data) |
291 | 260 |
292 | 261 |
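As an aside, the key derivation retained in import_locales (unchanged by this patch) maps a source file name and string ID onto a WebExtension message key. A minimal sketch, using made-up file and string names:

    import re

    def mangle_key(file_name, string_id, no_mangling=False):
        # Mirrors the logic above: '='-prefixed keys skip the file-name prefix,
        # and all non-word characters in the string ID become underscores.
        if no_mangling:
            return re.sub(r'\W', '_', string_id)
        return re.sub(r'\..*', '', file_name) + '_' + re.sub(r'\W', '_', string_id)

    print(mangle_key('meta.properties', 'remove.button.label'))
    # 'meta_remove_button_label'
    print(mangle_key('meta.properties', 'remove.button.label', no_mangling=True))
    # 'remove_button_label'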
293 def truncate(text, length_limit): | 262 def truncate(text, length_limit): |
294 if len(text) <= length_limit: | 263 if len(text) <= length_limit: |
295 return text | 264 return text |
296 return text[:length_limit - 1].rstrip() + u'\u2026' | 265 return text[:length_limit - 1].rstrip() + u'\u2026' |
297 | 266 |
298 | 267 |
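The truncate helper above keeps at most length_limit - 1 characters and appends an ellipsis. A quick usage check (the sample string is illustrative), assuming the same definition:

    def truncate(text, length_limit):
        if len(text) <= length_limit:
            return text
        return text[:length_limit - 1].rstrip() + u'\u2026'

    print(truncate(u'A long extension description', 12))
    # u'A long exte\u2026' -- 11 characters kept, plus the ellipsis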
299 def fix_translations_for_chrome(files): | 268 def fix_translations_for_chrome(files): |
tlucas 2017/10/04 11:48:40 Note: result of rebasing
300 defaults = {} | 269 defaults = {} |
301 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) | 270 data = json.loads(files['_locales/%s/messages.json' % defaultLocale]) |
302 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): | 271 for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']): |
303 name = match.group(1) | 272 name = match.group(1) |
304 defaults[name] = data[name] | 273 defaults[name] = data[name] |
305 | 274 |
306 limits = {} | 275 limits = {} |
307 manifest = json.loads(files['manifest.json']) | 276 manifest = json.loads(files['manifest.json']) |
308 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): | 277 for key, limit in (('name', 45), ('description', 132), ('short_name', 12)): |
309 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) | 278 match = re.search(r'__MSG_(\S+)__', manifest.get(key, '')) |
(...skipping 16 matching lines...)
326 for name, limit in limits.iteritems(): | 295 for name, limit in limits.iteritems(): |
327 info = data.get(name) | 296 info = data.get(name) |
328 if info: | 297 if info: |
329 info['message'] = truncate(info['message'], limit) | 298 info['message'] = truncate(info['message'], limit) |
330 files[path] = toJson(data) | 299 files[path] = toJson(data) |
331 | 300 |
332 # Chrome combines Latin American dialects of Spanish into es-419. | 301 # Chrome combines Latin American dialects of Spanish into es-419. |
333 if is_latam: | 302 if is_latam: |
334 data = files.pop(path) | 303 data = files.pop(path) |
335 if is_mexican: | 304 if is_mexican: |
336 files['_locales/es_419/' + filename] = data | 305 files['_locales/es_419/' + filename] = data |
tlucas 2017/10/04 11:48:40 Note: result of rebasing
337 | 306 |
338 | 307 |
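fix_translations_for_chrome above locates localized manifest fields via __MSG_…__ placeholders and truncates them to the Chrome Web Store limits used in the diff (name 45, description 132, short_name 12 characters). A minimal sketch of the placeholder scan, with a hypothetical manifest excerpt:

    import re

    # Hypothetical excerpt; in the build this is files['manifest.json'].
    manifest_text = '{"name": "__MSG_name__", "description": "__MSG_description__"}'

    # Same pattern used above to find which messages the manifest references.
    for match in re.finditer(r'__MSG_(\S+)__', manifest_text):
        print(match.group(1))
    # name
    # description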
339 def signBinary(zipdata, keyFile): | 308 def signBinary(zipdata, keyFile): |
340 from Crypto.Hash import SHA | 309 from Crypto.Hash import SHA |
341 from Crypto.PublicKey import RSA | 310 from Crypto.PublicKey import RSA |
342 from Crypto.Signature import PKCS1_v1_5 | 311 from Crypto.Signature import PKCS1_v1_5 |
343 | 312 |
344 try: | 313 try: |
345 with open(keyFile, 'rb') as file: | 314 with open(keyFile, 'rb') as file: |
346 key = RSA.importKey(file.read()) | 315 key = RSA.importKey(file.read()) |
(...skipping 59 matching lines...)
406 files.preprocess( | 375 files.preprocess( |
407 [f for f, _ in metadata.items('preprocess')], | 376 [f for f, _ in metadata.items('preprocess')], |
408 {'needsExt': True} | 377 {'needsExt': True} |
409 ) | 378 ) |
410 | 379 |
411 if metadata.has_section('import_locales'): | 380 if metadata.has_section('import_locales'): |
412 import_locales(params, files) | 381 import_locales(params, files) |
413 | 382 |
414 files['manifest.json'] = createManifest(params, files) | 383 files['manifest.json'] = createManifest(params, files) |
415 if type == 'chrome': | 384 if type == 'chrome': |
416 fix_translations_for_chrome(files) | 385 fix_translations_for_chrome(files) |
tlucas 2017/10/04 11:48:40 Note: result of rebasing
417 | 386 |
418 if devenv: | 387 if devenv: |
419 import buildtools | 388 import buildtools |
420 import random | 389 import random |
421 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js') | 390 files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js')
422 files['devenvVersion__'] = str(random.random()) | 391 files['devenvVersion__'] = str(random.random()) |
423 | 392 |
424 if metadata.has_option('general', 'testScripts'): | 393 if metadata.has_option('general', 'testScripts'): |
425 files['qunit/index.html'] = createScriptPage( | 394 files['qunit/index.html'] = createScriptPage( |
426 params, 'testIndex.html.tmpl', ('general', 'testScripts') | 395 params, 'testIndex.html.tmpl', ('general', 'testScripts') |
427 ) | 396 ) |
428 | 397 |
429 zipdata = files.zipToString() | 398 zipdata = files.zipToString() |
430 signature = None | 399 signature = None |
431 pubkey = None | 400 pubkey = None |
432 if keyFile != None: | 401 if keyFile != None: |
433 signature = signBinary(zipdata, keyFile) | 402 signature = signBinary(zipdata, keyFile) |
434 pubkey = getPublicKey(keyFile) | 403 pubkey = getPublicKey(keyFile) |
435 writePackage(outFile, pubkey, signature, zipdata) | 404 writePackage(outFile, pubkey, signature, zipdata) |