| LEFT | RIGHT |
| 1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
| 2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
| 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
| 4 | 4 |
| 5 import re | 5 import re |
| 6 import os | 6 import os |
| 7 import sys | 7 import sys |
| 8 import codecs | 8 import codecs |
| 9 import json | 9 import json |
| 10 import urlparse | 10 import urlparse |
| 11 import urllib | 11 import urllib |
| 12 import urllib2 | 12 import urllib2 |
| | 13 import mimetypes |
| 13 from StringIO import StringIO | 14 from StringIO import StringIO |
| 14 from ConfigParser import SafeConfigParser | 15 from ConfigParser import SafeConfigParser |
| 15 from zipfile import ZipFile | 16 from zipfile import ZipFile |
| 16 from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS | 17 from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS |
| 17 | 18 |
| 18 langMappingGecko = { | 19 langMappingGecko = { |
| 19 'bn-BD': 'bn', | 20 'bn-BD': 'bn', |
| 20 'br': 'br-FR', | 21 'br': 'br-FR', |
| 21 'dsb': 'dsb-DE', | 22 'dsb': 'dsb-DE', |
| 22 'fj-FJ': 'fj', | 23 'fj-FJ': 'fj', |
| (...skipping 63 matching lines...) |
| 86 'ta', | 87 'ta', |
| 87 'te', | 88 'te', |
| 88 'th', | 89 'th', |
| 89 'tr', | 90 'tr', |
| 90 'uk', | 91 'uk', |
| 91 'vi', | 92 'vi', |
| 92 'zh-CN', | 93 'zh-CN', |
| 93 'zh-TW', | 94 'zh-TW', |
| 94 ] | 95 ] |
| 95 | 96 |
| 96 CROWDIN_AP_URL = 'https://api.crowdin.com/api/project/{}/{}' | 97 CROWDIN_AP_URL = 'https://api.crowdin.com/api/project' |
| 97 | |
| 98 | |
| 99 def crowdin_url(project_name, action, key, get={}): | |
| 100 """Create a valid url for a crowdin endpoint.""" | |
| 101 url = CROWDIN_AP_URL.format(project_name, action) | |
| 102 get['key'] = key | |
| 103 get['json'] = 1 | |
| 104 | |
| 105 scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) | |
| 106 | |
| 107 query = urlparse.parse_qs(query) | |
| 108 query.update(get) | |
| 109 | |
| 110 return urlparse.urlunparse(( | |
| 111 scheme, netloc, path, params, urllib.urlencode(query), fragment | |
| 112 )) | |
| 113 | 98 |
| 114 | 99 |
| 115 def crowdin_request(project_name, action, key, get={}, post_data=None, | 100 def crowdin_request(project_name, action, key, get={}, post_data=None, |
| 116 headers={}, raw=False): | 101 headers={}, raw=False): |
| 117 """Perform a call to crowdin and raise an Exception on failure.""" | 102 """Perform a call to crowdin and raise an Exception on failure.""" |
| 118 request = urllib2.Request( | 103 request = urllib2.Request( |
| 119 crowdin_url(project_name, action, key, get), | 104 '{}/{}/{}?{}'.format(CROWDIN_AP_URL, |
| | 105 urllib.quote(project_name), |
| | 106 urllib.quote(action), |
| | 107 urllib.urlencode(dict(get, key=key, json=1))), |
| 120 post_data, | 108 post_data, |
| 121 headers, | 109 headers, |
| 122 ) | 110 ) |
| 123 | 111 |
| 124 try: | 112 try: |
| 125 result = urllib2.urlopen(request).read() | 113 result = urllib2.urlopen(request).read() |
| 126 except urllib2.HTTPError as e: | 114 except urllib2.HTTPError as e: |
| 127 raise Exception('Server returned HTTP Error {}:\n{}'.format(e.code, | 115 raise Exception('Server returned HTTP Error {}:\n{}'.format(e.code, |
| 128 e.read())) | 116 e.read())) |
| 129 | 117 |
| (...skipping 218 matching lines...) |
| 348 locales.sort() | 336 locales.sort() |
| 349 params = urllib.urlencode([('languages[]', locale) for locale in locales]) | 337 params = urllib.urlencode([('languages[]', locale) for locale in locales]) |
| 350 | 338 |
| 351 crowdin_request(projectName, 'edit-project', key, post_data=params) | 339 crowdin_request(projectName, 'edit-project', key, post_data=params) |
| 352 | 340 |
| 353 | 341 |
| 354 def crowdin_prepare_upload(files): | 342 def crowdin_prepare_upload(files): |
| 355 """Create a post body and matching headers, which Crowdin can handle.""" | 343 """Create a post body and matching headers, which Crowdin can handle.""" |
| 356 boundary = '----------ThIs_Is_tHe_bouNdaRY_$' | 344 boundary = '----------ThIs_Is_tHe_bouNdaRY_$' |
| 357 body = '' | 345 body = '' |
| 358 for file, data in files: | 346 for name, data in files: |
| 359 body += '--%s\r\n' % boundary | 347 mimetype = mimetypes.guess_type(name)[0] |
| 360 body += 'Content-Disposition: form-data; name="files[%s]"; filename="%s"\r\n' % (file, file) | 348 body += ( |
| 361 body += 'Content-Type: application/octet-stream\r\n' | 349 '--{boundary}\r\n' |
| 362 body += 'Content-Transfer-Encoding: binary\r\n' | 350 'Content-Disposition: form-data; name="files[{name}]"; ' |
| 363 body += '\r\n' + data + '\r\n' | 351 'filename="{name}"\r\n' |
| 364 body += '--%s--\r\n' % boundary | 352 'Content-Type: {mimetype}; charset=utf-8\r\n' |
| | 353 'Content-Transfer-Encoding: binary\r\n' |
| | 354 '\r\n{data}\r\n' |
| | 355 '--{boundary}--\r\n' |
| | 356 ).format(boundary=boundary, name=name, data=data, mimetype=mimetype) |
| 365 | 357 |
| 366 body = body.encode('utf-8') | 358 body = body.encode('utf-8') |
| 367 return ( | 359 return ( |
| 368 StringIO(body), | 360 StringIO(body), |
| 369 { | 361 { |
| 370 'Content-Type': ('multipart/form-data; ; charset=utf-8; ' | 362 'Content-Type': ('multipart/form-data; boundary=' + boundary), |
| 371 'boundary=' + boundary), | |
| 372 'Content-Length': len(body) | 363 'Content-Length': len(body) |
| 373 } | 364 }, |
| 374 ) | 365 ) |
| 375 | 366 |
| 376 | 367 |
| 377 def updateTranslationMaster(localeConfig, metadata, dir, projectName, key): | 368 def updateTranslationMaster(localeConfig, metadata, dir, projectName, key): |
| 378 result = crowdin_request(projectName, 'info', key) | 369 result = crowdin_request(projectName, 'info', key) |
| 379 | 370 |
| 380 existing = set(map(lambda f: f['name'], result['files'])) | 371 existing = set(map(lambda f: f['name'], result['files'])) |
| 381 add = [] | 372 add = [] |
| 382 update = [] | 373 update = [] |
| 383 for file in os.listdir(dir): | 374 for file in os.listdir(dir): |
| (...skipping 12 matching lines...) |
| 396 newName = file + '.json' | 387 newName = file + '.json' |
| 397 | 388 |
| 398 if data: | 389 if data: |
| 399 if newName in existing: | 390 if newName in existing: |
| 400 update.append((newName, data)) | 391 update.append((newName, data)) |
| 401 existing.remove(newName) | 392 existing.remove(newName) |
| 402 else: | 393 else: |
| 403 add.append((newName, data)) | 394 add.append((newName, data)) |
| 404 | 395 |
| 405 if len(add): | 396 if len(add): |
| 406 data = {'titles[{}]'.format(name): re.sub(r'\.json', '', name) | 397 query = {'titles[{}]'.format(name): os.path.splitext(name)[0] |
| 407 for name, data in add} | 398 for name, _ in add} |
| 408 data['type'] = 'chrome' | 399 query['type'] = 'chrome' |
| 409 data, headers = crowdin_prepare_upload(add) | 400 data, headers = crowdin_prepare_upload(add) |
| 410 crowdin_request(projectName, 'add-file', key, post_data=data, | 401 crowdin_request(projectName, 'add-file', key, query, post_data=data, |
| 411 headers=headers) | 402 headers=headers) |
| 412 if len(update): | 403 if len(update): |
| 413 data, headers = crowdin_prepare_upload(update) | 404 data, headers = crowdin_prepare_upload(update) |
| 414 crowdin_request(projectName, 'update-file', key, post_data=data, | 405 crowdin_request(projectName, 'update-file', key, post_data=data, |
| 415 headers=headers) | 406 headers=headers) |
| 416 for file in existing: | 407 for file in existing: |
| 417 crowdin_request(projectName, 'delete-file', key, {'file': file}) | 408 crowdin_request(projectName, 'delete-file', key, {'file': file}) |
| 418 | 409 |
| 419 | 410 |
| 420 def uploadTranslations(localeConfig, metadata, dir, locale, projectName, key): | 411 def uploadTranslations(localeConfig, metadata, dir, locale, projectName, key): |
| (...skipping 92 matching lines...) |
| 513 | 504 |
| 514 # Remove any extra files | 505 # Remove any extra files |
| 515 for dir, files in dirs.iteritems(): | 506 for dir, files in dirs.iteritems(): |
| 516 baseDir = os.path.join(localeConfig['base_path'], dir) | 507 baseDir = os.path.join(localeConfig['base_path'], dir) |
| 517 if not os.path.exists(baseDir): | 508 if not os.path.exists(baseDir): |
| 518 continue | 509 continue |
| 519 for file in os.listdir(baseDir): | 510 for file in os.listdir(baseDir): |
| 520 path = os.path.join(baseDir, file) | 511 path = os.path.join(baseDir, file) |
| 521 if os.path.isfile(path) and (file.endswith('.json') or file.endswith('.properties') or file.endswith('.dtd')) and not file in files: | 512 if os.path.isfile(path) and (file.endswith('.json') or file.endswith('.properties') or file.endswith('.dtd')) and not file in files: |
| 522 os.remove(path) | 513 os.remove(path) |
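Note on the crowdin_request change above: with the crowdin_url helper removed, the endpoint URL is now built inline from CROWDIN_AP_URL. A minimal sketch of the URL that construction yields, assuming a hypothetical project name and API key and the 'info' action (Python 2, as in the script):

    import urllib

    CROWDIN_AP_URL = 'https://api.crowdin.com/api/project'
    # 'example-project' and 'SECRET' are placeholder values, not taken from the patch.
    url = '{}/{}/{}?{}'.format(CROWDIN_AP_URL,
                               urllib.quote('example-project'),
                               urllib.quote('info'),
                               urllib.urlencode(dict({}, key='SECRET', json=1)))
    # e.g. https://api.crowdin.com/api/project/example-project/info?json=1&key=SECRET
    # (parameter order may vary, since urlencode iterates a plain dict)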