| OLD | NEW |
| 1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
| 2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
| 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
| 4 | 4 |
| 5 import re | 5 import re |
| 6 import os | 6 import os |
| 7 import sys | 7 import sys |
| 8 import codecs | 8 import codecs |
| 9 import json | 9 import json |
| 10 import urlparse | 10 import urlparse |
| 11 import urllib | 11 import urllib |
| 12 import urllib2 | 12 import urllib2 |
| 13 import mimetypes | 13 import mimetypes |
| 14 from StringIO import StringIO | 14 from StringIO import StringIO |
| 15 from ConfigParser import SafeConfigParser | 15 from ConfigParser import SafeConfigParser |
| 16 from zipfile import ZipFile | 16 from zipfile import ZipFile |
| 17 from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS | 17 from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS |
| 18 | 18 |
| 19 langMappingGecko = { | 19 CROWDIN_LANG_MAPPING = { |
| 20 'bn-BD': 'bn', | |
| 21 'br': 'br-FR', | 20 'br': 'br-FR', |
| 22 'dsb': 'dsb-DE', | 21 'dsb': 'dsb-DE', |
| 23 'fj-FJ': 'fj', | 22 'es': 'es-ES', |
| 23 'fur': 'fur-IT', |
| 24 'fy': 'fy-NL', |
| 25 'ga': 'ga-IE', |
| 26 'gu': 'gu-IN', |
| 24 'hsb': 'hsb-DE', | 27 'hsb': 'hsb-DE', |
| 25 'hi-IN': 'hi', | 28 'hy': 'hy-AM', |
| 26 'ml': 'ml-IN', | 29 'ml': 'ml-IN', |
| 27 'nb-NO': 'nb', | 30 'nn': 'nn-NO', |
| 31 'pa': 'pa-IN', |
| 28 'rm': 'rm-CH', | 32 'rm': 'rm-CH', |
| 29 'ta-LK': 'ta', | 33 'si': 'si-LK', |
| 30 'wo-SN': 'wo', | 34 'sv': 'sv-SE', |
| 35 'ur': 'ur-PK', |
| 31 } | 36 } |
| 32 | 37 |
| 33 langMappingChrome = { | |
| 34 'es-419': 'es-MX', | |
| 35 'es': 'es-ES', | |
| 36 'sv': 'sv-SE', | |
| 37 'ml': 'ml-IN', | |
| 38 'gu': 'gu-IN', | |
| 39 } | |
| 40 | |
| 41 chromeLocales = [ | |
| 42 'am', | |
| 43 'ar', | |
| 44 'bg', | |
| 45 'bn', | |
| 46 'ca', | |
| 47 'cs', | |
| 48 'da', | |
| 49 'de', | |
| 50 'el', | |
| 51 'en-GB', | |
| 52 'en-US', | |
| 53 'es-419', | |
| 54 'es', | |
| 55 'et', | |
| 56 'fa', | |
| 57 'fi', | |
| 58 'fil', | |
| 59 'fr', | |
| 60 'gu', | |
| 61 'he', | |
| 62 'hi', | |
| 63 'hr', | |
| 64 'hu', | |
| 65 'id', | |
| 66 'it', | |
| 67 'ja', | |
| 68 'kn', | |
| 69 'ko', | |
| 70 'lt', | |
| 71 'lv', | |
| 72 'ml', | |
| 73 'mr', | |
| 74 'ms', | |
| 75 'nb', | |
| 76 'nl', | |
| 77 'pl', | |
| 78 'pt-BR', | |
| 79 'pt-PT', | |
| 80 'ro', | |
| 81 'ru', | |
| 82 'sk', | |
| 83 'sl', | |
| 84 'sr', | |
| 85 'sv', | |
| 86 'sw', | |
| 87 'ta', | |
| 88 'te', | |
| 89 'th', | |
| 90 'tr', | |
| 91 'uk', | |
| 92 'vi', | |
| 93 'zh-CN', | |
| 94 'zh-TW', | |
| 95 ] | |
| 96 | |
| 97 CROWDIN_AP_URL = 'https://api.crowdin.com/api/project' | 38 CROWDIN_AP_URL = 'https://api.crowdin.com/api/project' |
| 39 FIREFOX_RELEASES_URL = 'http://www.mozilla.org/en-US/firefox/all.html' |
| 40 FIREFOX_LP_URL = 'https://addons.mozilla.org/en-US/firefox/language-tools/' |
| 41 CHROMIUM_DEB_URL = 'https://packages.debian.org/sid/all/chromium-l10n/filelist' |
| 98 | 42 |
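Note: CROWDIN_LANG_MAPPING is used in both directions further down in this patch: forward with a .get() fallback when uploading translations, and via a reverse scan when unpacking the downloaded archive. A minimal sketch of both lookups; the helper names are illustrative and not part of this patch:

    # Forward: our locale code -> Crowdin locale code, falling back to the input.
    def to_crowdin(locale):
        return CROWDIN_LANG_MAPPING.get(locale, locale)

    # Reverse: Crowdin locale code -> our locale code, as done when extracting the zip.
    def from_crowdin(crowdin_locale):
        for local, remote in CROWDIN_LANG_MAPPING.iteritems():
            if remote == crowdin_locale:
                return local
        return crowdin_locale

    # to_crowdin('sv') == 'sv-SE'; from_crowdin('sv-SE') == 'sv'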
| 99 | 43 |
| 100 def crowdin_request(project_name, action, key, get={}, post_data=None, | 44 def crowdin_request(project_name, action, key, get={}, post_data=None, |
| 101 headers={}, raw=False): | 45 headers={}, raw=False): |
| 102 """Perform a call to crowdin and raise an Exception on failure.""" | 46 """Perform a call to crowdin and raise an Exception on failure.""" |
| 103 request = urllib2.Request( | 47 request = urllib2.Request( |
| 104 '{}/{}/{}?{}'.format(CROWDIN_AP_URL, | 48 '{}/{}/{}?{}'.format(CROWDIN_AP_URL, |
| 105 urllib.quote(project_name), | 49 urllib.quote(project_name), |
| 106 urllib.quote(action), | 50 urllib.quote(action), |
| 107 urllib.urlencode(dict(get, key=key, json=1))), | 51 urllib.urlencode(dict(get, key=key, json=1))), |
| (...skipping 30 matching lines...) |
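Note: a minimal usage sketch for crowdin_request, based only on the call sites visible in this patch; the project name and API key are placeholders:

    # Returns the parsed JSON response by default (json=1 is always appended to the query).
    supported = crowdin_request('example-project', 'supported-languages', 'example-api-key')
    codes = [language['crowdin_code'] for language in supported]

    # With raw=True the response body is returned unparsed, e.g. the translation archive.
    archive = crowdin_request('example-project', 'download/all.zip', 'example-api-key', raw=True)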
| 138 | 82 |
| 139 | 83 |
| 140 def escapeEntity(value): | 84 def escapeEntity(value): |
| 141 return value.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;') | 85 return value.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;') |
| 142 | 86 |
| 143 | 87 |
| 144 def unescapeEntity(value): | 88 def unescapeEntity(value): |
| 145 return value.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"') | 89 return value.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"') |
| 146 | 90 |
| 147 | 91 |
| 148 def mapLocale(type, locale): | |
| 149 mapping = langMappingChrome if type == 'ISO-15897' else langMappingGecko | |
| 150 return mapping.get(locale, locale) | |
| 151 | |
| 152 | |
| 153 def parseDTDString(data, path): | 92 def parseDTDString(data, path): |
| 154 result = [] | 93 result = [] |
| 155 currentComment = [None] | 94 currentComment = [None] |
| 156 | 95 |
| 157 parser = ParserCreate() | 96 parser = ParserCreate() |
| 158 parser.UseForeignDTD(True) | 97 parser.UseForeignDTD(True) |
| 159 parser.SetParamEntityParsing(XML_PARAM_ENTITY_PARSING_ALWAYS) | 98 parser.SetParamEntityParsing(XML_PARAM_ENTITY_PARSING_ALWAYS) |
| 160 | 99 |
| 161 def ExternalEntityRefHandler(context, base, systemId, publicId): | 100 def ExternalEntityRefHandler(context, base, systemId, publicId): |
| 162 subparser = parser.ExternalEntityParserCreate(context, 'utf-8') | 101 subparser = parser.ExternalEntityParserCreate(context, 'utf-8') |
| (...skipping 134 matching lines...) |
| 297 for key, value in parsed.iteritems(): | 236 for key, value in parsed.iteritems(): |
| 298 if 'description' in value: | 237 if 'description' in value: |
| 299 del value['description'] | 238 del value['description'] |
| 300 | 239 |
| 301 file = codecs.open(path, 'wb', encoding='utf-8') | 240 file = codecs.open(path, 'wb', encoding='utf-8') |
| 302 json.dump(parsed, file, ensure_ascii=False, sort_keys=True, indent=2, separators=(',', ': ')) | 241 json.dump(parsed, file, ensure_ascii=False, sort_keys=True, indent=2, separators=(',', ': ')) |
| 303 file.close() | 242 file.close() |
| 304 | 243 |
| 305 | 244 |
| 306 def setupTranslations(localeConfig, projectName, key): | 245 def setupTranslations(localeConfig, projectName, key): |
| 307 # Make a new set from the locales list, mapping to Crowdin friendly format | 246 locales = set() |
| 308 locales = {mapLocale(localeConfig['name_format'], locale) | |
| 309 for locale in localeConfig['locales']} | |
| 310 | 247 |
| 311 # Fill up with locales that we don't have but the browser supports | 248 # Languages supported by Firefox |
| 312 if 'chrome' in localeConfig['target_platforms']: | 249 data = urllib2.urlopen(FIREFOX_RELEASES_URL).read() |
| 313 for locale in chromeLocales: | 250 for match in re.finditer(r'&lang=([\w\-]+)"', data): |
| 314 locales.add(mapLocale('ISO-15897', locale)) | 251 locales.add(match.group(1)) |
| 315 | 252 |
| 316 if 'gecko' in localeConfig['target_platforms']: | 253 # Languages supported by Firefox Language Packs |
| 317 firefoxLocales = urllib2.urlopen('http://www.mozilla.org/en-US/firefox/all.html').read() | 254 data = urllib2.urlopen(FIREFOX_LP_URL).read() |
| 318 for match in re.finditer(r'&lang=([\w\-]+)"', firefoxLocales): | 255 for match in re.finditer(r'<tr>.*?</tr>', data, re.S): |
| 319 locales.add(mapLocale('BCP-47', match.group(1))) | 256 if match.group(0).find('Install Language Pack') >= 0: |
| 320 langPacks = urllib2.urlopen('https://addons.mozilla.org/en-US/firefox/language-tools/').read() | 257 match2 = re.search(r'lang="([\w\-]+)"', match.group(0)) |
| 321 for match in re.finditer(r'<tr>.*?</tr>', langPacks, re.S): | 258 if match2: |
| 322 if match.group(0).find('Install Language Pack') >= 0: | 259 locales.add(match2.group(1)) |
| 323 match2 = re.search(r'lang="([\w\-]+)"', match.group(0)) | |
| 324 if match2: | |
| 325 locales.add(mapLocale('BCP-47', match2.group(1))) | |
| 326 | 260 |
| 327 allowed = set() | 261 # Languages supported by Chrome (excluding es-419) |
| 328 allowedLocales = crowdin_request(projectName, 'supported-languages', key) | 262 data = urllib2.urlopen(CHROMIUM_DEB_URL).read() |
| 263 for match in re.finditer(r'locales/(?!es-419)([\w\-]+)\.pak', data): |
| 264 locales.add(match.group(1)) |
| 329 | 265 |
| 330 for locale in allowedLocales: | 266 # We don't translate individual dialects of languages |
| 331 allowed.add(locale['crowdin_code']) | 267 # other than English, Spanish, Portuguese and Chinese. |
| 268 for locale in list(locales): |
| 269 prefix = locale.split('-')[0] |
| 270 if prefix not in {'en', 'es', 'pt', 'zh'}: |
| 271 locales.remove(locale) |
| 272 locales.add(prefix) |
| 273 |
| 274 # Add languages with existing translations. |
| 275 locales.update(localeConfig['locales']) |
| 276 |
| 277 # Don't add the language we translate from as target translation. |
| 278 locales.remove(localeConfig['default_locale'].replace('_', '-')) |
| 279 |
| 280 # Convert to locales understood by Crowdin. |
| 281 locales = {CROWDIN_LANG_MAPPING.get(locale, locale) for locale in locales} |
| 282 allowed = {locale['crowdin_code'] for locale in |
| 283 crowdin_request(projectName, 'supported-languages', key)} |
| 332 if not allowed.issuperset(locales): | 284 if not allowed.issuperset(locales): |
| 333 print "Warning, following locales aren't allowed by server: " + ', '.joi
n(locales - allowed) | 285 print "Warning, following locales aren't allowed by server: " + ', '.joi
n(locales - allowed) |
| 334 | 286 |
| 335 locales = list(locales & allowed) | 287 locales = sorted(locales & allowed) |
| 336 locales.sort() | |
| 337 params = urllib.urlencode([('languages[]', locale) for locale in locales]) | 288 params = urllib.urlencode([('languages[]', locale) for locale in locales]) |
| 338 | |
| 339 crowdin_request(projectName, 'edit-project', key, post_data=params) | 289 crowdin_request(projectName, 'edit-project', key, post_data=params) |
| 340 | 290 |
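Note: the new dialect handling in setupTranslations collapses every locale except English, Spanish, Portuguese and Chinese variants down to its bare language code before querying Crowdin. A standalone sketch of that step with example input:

    locales = {'de-DE', 'fr', 'pt-BR', 'zh-CN', 'es-419'}
    for locale in list(locales):
        prefix = locale.split('-')[0]
        if prefix not in {'en', 'es', 'pt', 'zh'}:
            locales.remove(locale)
            locales.add(prefix)
    # locales is now {'de', 'fr', 'pt-BR', 'zh-CN', 'es-419'}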
| 341 | 291 |
| 342 def crowdin_prepare_upload(files): | 292 def crowdin_prepare_upload(files): |
| 343 """Create a post body and matching headers, which Crowdin can handle.""" | 293 """Create a post body and matching headers, which Crowdin can handle.""" |
| 344 boundary = '----------ThIs_Is_tHe_bouNdaRY_$' | 294 boundary = '----------ThIs_Is_tHe_bouNdaRY_$' |
| 345 body = '' | 295 body = '' |
| 346 for name, data in files: | 296 for name, data in files: |
| 347 body += ( | 297 body += ( |
| 348 '--{boundary}\r\n' | 298 '--{boundary}\r\n' |
| (...skipping 73 matching lines...) |
| 422 data = json.dumps({file: {'message': fileHandle.read()}}) | 372 data = json.dumps({file: {'message': fileHandle.read()}}) |
| 423 fileHandle.close() | 373 fileHandle.close() |
| 424 newName = file + '.json' | 374 newName = file + '.json' |
| 425 else: | 375 else: |
| 426 data = toJSON(path) | 376 data = toJSON(path) |
| 427 newName = file + '.json' | 377 newName = file + '.json' |
| 428 | 378 |
| 429 if data: | 379 if data: |
| 430 files.append((newName, data)) | 380 files.append((newName, data)) |
| 431 if len(files): | 381 if len(files): |
| 432 language = mapLocale(localeConfig['name_format'], locale) | 382 language = CROWDIN_LANG_MAPPING.get(locale, locale) |
| 433 data, headers = crowdin_prepare_upload(files) | 383 data, headers = crowdin_prepare_upload(files) |
| 434 crowdin_request(projectName, 'upload-translation', key, | 384 crowdin_request(projectName, 'upload-translation', key, |
| 435 {'language': language}, post_data=data, | 385 {'language': language}, post_data=data, |
| 436 headers=headers) | 386 headers=headers) |
| 437 | 387 |
| 438 | 388 |
| 439 def getTranslations(localeConfig, projectName, key): | 389 def getTranslations(localeConfig, projectName, key): |
| 440 """Download all available translations from crowdin. | 390 """Download all available translations from crowdin. |
| 441 | 391 |
| 442 Trigger crowdin to build the available export, wait for crowdin to | 392 Trigger crowdin to build the available export, wait for crowdin to |
| 443 finish the job and download the generated zip afterwards. | 393 finish the job and download the generated zip afterwards. |
| 444 """ | 394 """ |
| 445 crowdin_request(projectName, 'export', key) | 395 crowdin_request(projectName, 'export', key) |
| 446 | 396 |
| 447 result = crowdin_request(projectName, 'download/all.zip', key, raw=True) | 397 result = crowdin_request(projectName, 'download/all.zip', key, raw=True) |
| 448 zip = ZipFile(StringIO(result)) | 398 zip = ZipFile(StringIO(result)) |
| 449 dirs = {} | 399 dirs = {} |
| 450 | 400 |
| 451 normalizedDefaultLocale = localeConfig['default_locale'] | 401 normalizedDefaultLocale = localeConfig['default_locale'] |
| 452 if localeConfig['name_format'] == 'ISO-15897': | 402 if localeConfig['name_format'] == 'ISO-15897': |
| 453 normalizedDefaultLocale = normalizedDefaultLocale.replace('_', '-') | 403 normalizedDefaultLocale = normalizedDefaultLocale.replace('_', '-') |
| 454 normalizedDefaultLocale = mapLocale(localeConfig['name_format'], | 404 normalizedDefaultLocale = CROWDIN_LANG_MAPPING.get(normalizedDefaultLocale, |
| 455 normalizedDefaultLocale) | 405 normalizedDefaultLocale) |
| 456 | 406 |
| 457 for info in zip.infolist(): | 407 for info in zip.infolist(): |
| 458 if not info.filename.endswith('.json'): | 408 if not info.filename.endswith('.json'): |
| 459 continue | 409 continue |
| 460 | 410 |
| 461 dir, file = os.path.split(info.filename) | 411 dir, file = os.path.split(info.filename) |
| 462 if not re.match(r'^[\w\-]+$', dir) or dir == normalizedDefaultLocale: | 412 if not re.match(r'^[\w\-]+$', dir) or dir == normalizedDefaultLocale: |
| 463 continue | 413 continue |
| 464 if localeConfig['file_format'] == 'chrome-json' and file.count('.') == 1: | 414 if localeConfig['file_format'] == 'chrome-json' and file.count('.') == 1: |
| 465 origFile = file | 415 origFile = file |
| 466 else: | 416 else: |
| 467 origFile = re.sub(r'\.json$', '', file) | 417 origFile = re.sub(r'\.json$', '', file) |
| 468 if (localeConfig['file_format'] == 'gecko-dtd' and | 418 if (localeConfig['file_format'] == 'gecko-dtd' and |
| 469 not origFile.endswith('.dtd') and | 419 not origFile.endswith('.dtd') and |
| 470 not origFile.endswith('.properties')): | 420 not origFile.endswith('.properties')): |
| 471 continue | 421 continue |
| 472 | 422 |
| 473 if localeConfig['name_format'] == 'ISO-15897': | 423 for key, value in CROWDIN_LANG_MAPPING.iteritems(): |
| 474 mapping = langMappingChrome | |
| 475 else: | |
| 476 mapping = langMappingGecko | |
| 477 | |
| 478 for key, value in mapping.iteritems(): | |
| 479 if value == dir: | 424 if value == dir: |
| 480 dir = key | 425 dir = key |
| 481 if localeConfig['name_format'] == 'ISO-15897': | 426 if localeConfig['name_format'] == 'ISO-15897': |
| 482 dir = dir.replace('-', '_') | 427 dir = dir.replace('-', '_') |
| 483 | 428 |
| 484 data = zip.open(info.filename).read() | 429 data = zip.open(info.filename).read() |
| 485 if data == '[]': | 430 if data == '[]': |
| 486 continue | 431 continue |
| 487 | 432 |
| 488 if not dir in dirs: | 433 if not dir in dirs: |
| (...skipping 16 matching lines...) |
| 505 | 450 |
| 506 # Remove any extra files | 451 # Remove any extra files |
| 507 for dir, files in dirs.iteritems(): | 452 for dir, files in dirs.iteritems(): |
| 508 baseDir = os.path.join(localeConfig['base_path'], dir) | 453 baseDir = os.path.join(localeConfig['base_path'], dir) |
| 509 if not os.path.exists(baseDir): | 454 if not os.path.exists(baseDir): |
| 510 continue | 455 continue |
| 511 for file in os.listdir(baseDir): | 456 for file in os.listdir(baseDir): |
| 512 path = os.path.join(baseDir, file) | 457 path = os.path.join(baseDir, file) |
| 513 if os.path.isfile(path) and (file.endswith('.json') or file.endswith('.properties') or file.endswith('.dtd')) and not file in files: | 458 if os.path.isfile(path) and (file.endswith('.json') or file.endswith('.properties') or file.endswith('.dtd')) and not file in files: |
| 514 os.remove(path) | 459 os.remove(path) |
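Note: the localeConfig dictionary threaded through these functions is built elsewhere; a hypothetical example using only the keys this file reads ('name_format', 'file_format', 'base_path', 'default_locale', 'locales'), with placeholder paths, project name and API key:

    localeConfig = {
        'name_format': 'BCP-47',                    # or 'ISO-15897' for underscore-style directory names
        'file_format': 'gecko-dtd',                 # or 'chrome-json'
        'base_path': '/path/to/extension/locale',   # hypothetical
        'default_locale': 'en-US',
        'locales': ['de', 'fr', 'pt-BR'],
    }

    setupTranslations(localeConfig, 'example-project', 'example-api-key')
    getTranslations(localeConfig, 'example-project', 'example-api-key')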