LEFT | RIGHT
---|---
1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
4 | 4 |
5 import re | 5 import re |
6 import os | 6 import os |
7 import sys | 7 import sys |
8 import codecs | 8 import codecs |
9 import json | 9 import json |
10 import urlparse | 10 import urlparse |
11 import urllib | 11 import urllib |
12 import urllib2 | 12 import urllib2 |
13 import mimetypes | 13 import mimetypes |
14 from StringIO import StringIO | 14 from StringIO import StringIO |
15 from ConfigParser import SafeConfigParser | 15 from ConfigParser import SafeConfigParser |
16 from zipfile import ZipFile | 16 from zipfile import ZipFile |
17 from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS | 17 from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS |
18 | |
19 CROWDIN_AP_URL = 'https://api.crowdin.com/api/project' | |
20 | 18 |
21 CROWDIN_LANG_MAPPING = { | 19 CROWDIN_LANG_MAPPING = { |
tlucas (2017/10/04 11:48:39): Note: result of rebasing
22 'br': 'br-FR', | 20 'br': 'br-FR', |
23 'dsb': 'dsb-DE', | 21 'dsb': 'dsb-DE', |
24 'es': 'es-ES', | 22 'es': 'es-ES', |
25 'fur': 'fur-IT', | 23 'fur': 'fur-IT', |
26 'fy': 'fy-NL', | 24 'fy': 'fy-NL', |
27 'ga': 'ga-IE', | 25 'ga': 'ga-IE', |
28 'gu': 'gu-IN', | 26 'gu': 'gu-IN', |
29 'hsb': 'hsb-DE', | 27 'hsb': 'hsb-DE', |
30 'hy': 'hy-AM', | 28 'hy': 'hy-AM', |
31 'ml': 'ml-IN', | 29 'ml': 'ml-IN', |
32 'nn': 'nn-NO', | 30 'nn': 'nn-NO', |
33 'pa': 'pa-IN', | 31 'pa': 'pa-IN', |
34 'rm': 'rm-CH', | 32 'rm': 'rm-CH', |
35 'si': 'si-LK', | 33 'si': 'si-LK', |
36 'sv': 'sv-SE', | 34 'sv': 'sv-SE', |
37 'ur': 'ur-PK', | 35 'ur': 'ur-PK', |
38 } | 36 } |
39 | 37 |
| 38 CROWDIN_AP_URL = 'https://api.crowdin.com/api/project' |
| 39 FIREFOX_RELEASES_URL = 'http://www.mozilla.org/en-US/firefox/all.html' |
| 40 FIREFOX_LP_URL = 'https://addons.mozilla.org/en-US/firefox/language-tools/' |
| 41 CHROMIUM_DEB_URL = 'https://packages.debian.org/sid/all/chromium-l10n/filelist' |
| 42 |
40 | 43 |
41 def crowdin_request(project_name, action, key, get={}, post_data=None, | 44 def crowdin_request(project_name, action, key, get={}, post_data=None, |
42 headers={}, raw=False): | 45 headers={}, raw=False): |
43 """Perform a call to crowdin and raise an Exception on failure.""" | 46 """Perform a call to crowdin and raise an Exception on failure.""" |
44 request = urllib2.Request( | 47 request = urllib2.Request( |
45 '{}/{}/{}?{}'.format(CROWDIN_AP_URL, | 48 '{}/{}/{}?{}'.format(CROWDIN_AP_URL, |
46 urllib.quote(project_name), | 49 urllib.quote(project_name), |
47 urllib.quote(action), | 50 urllib.quote(action), |
48 urllib.urlencode(dict(get, key=key, json=1))), | 51 urllib.urlencode(dict(get, key=key, json=1))), |
49 post_data, | 52 post_data, |
(...skipping 44 matching lines...)
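For orientation, the request above glues the project name, API action and query string onto CROWDIN_AP_URL; the skipped lines (error handling and response parsing) are not reproduced here. A minimal sketch of just the URL construction, with placeholder values (illustrative only):

```python
import urllib

CROWDIN_AP_URL = 'https://api.crowdin.com/api/project'


def build_crowdin_url(project_name, action, key, get={}):
    # Quote the path components and append the query string;
    # json=1 asks the API for a JSON response.
    return '{}/{}/{}?{}'.format(CROWDIN_AP_URL,
                                urllib.quote(project_name),
                                urllib.quote(action),
                                urllib.urlencode(dict(get, key=key, json=1)))


# Placeholder values, not taken from this review:
# build_crowdin_url('example-project', 'info', 'SECRET')
# -> 'https://api.crowdin.com/api/project/example-project/info?json=1&key=SECRET'
#    (query parameter order may vary)
```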
94 | 97 |
95 file = codecs.open(path, 'wb', encoding='utf-8') | 98 file = codecs.open(path, 'wb', encoding='utf-8') |
96 json.dump(parsed, file, ensure_ascii=False, sort_keys=True, indent=2, separators=(',', ': ')) | 99 json.dump(parsed, file, ensure_ascii=False, sort_keys=True, indent=2, separators=(',', ': ')) |
97 file.close() | 100 file.close() |
98 | 101 |
99 | 102 |
100 def setupTranslations(localeConfig, projectName, key): | 103 def setupTranslations(localeConfig, projectName, key): |
101 locales = set() | 104 locales = set() |
102 | 105 |
103 # Languages supported by Firefox | 106 # Languages supported by Firefox |
104 data = urllib2.urlopen('http://www.mozilla.org/en-US/firefox/all.html').read() | 107 data = urllib2.urlopen(FIREFOX_RELEASES_URL).read() |
105 for match in re.finditer(r'&lang=([\w\-]+)"', data): | 108 for match in re.finditer(r'&lang=([\w\-]+)"', data): |
106 locales.add(match.group(1)) | 109 locales.add(match.group(1)) |
107 | 110 |
108 # Languages supported by Firefox Language Packs | 111 # Languages supported by Firefox Language Packs |
109 data = urllib2.urlopen('https://addons.mozilla.org/en-US/firefox/language-tools/').read() | 112 data = urllib2.urlopen(FIREFOX_LP_URL).read() |
110 for match in re.finditer(r'<tr>.*?</tr>', data, re.S): | 113 for match in re.finditer(r'<tr>.*?</tr>', data, re.S): |
111 if match.group(0).find('Install Language Pack') >= 0: | 114 if match.group(0).find('Install Language Pack') >= 0: |
112 match2 = re.search(r'lang="([\w\-]+)"', match.group(0)) | 115 match2 = re.search(r'lang="([\w\-]+)"', match.group(0)) |
113 if match2: | 116 if match2: |
114 locales.add(match2.group(1)) | 117 locales.add(match2.group(1)) |
115 | 118 |
116 # Languages supported by Chrome (excluding es-419) | 119 # Languages supported by Chrome (excluding es-419) |
117 data = urllib2.urlopen('https://packages.debian.org/sid/all/chromium-l10n/filelist').read() | 120 data = urllib2.urlopen(CHROMIUM_DEB_URL).read() |
118 for match in re.finditer(r'locales/(?!es-419)([\w\-]+)\.pak', data): | 121 for match in re.finditer(r'locales/(?!es-419)([\w\-]+)\.pak', data): |
119 locales.add(match.group(1)) | 122 locales.add(match.group(1)) |
120 | 123 |
121 # We don't translate individual dialects of languages | 124 # We don't translate individual dialects of languages |
122 # other than English, Spanish, Portuguese and Chinese. | 125 # other than English, Spanish, Portuguese and Chinese. |
123 for locale in list(locales): | 126 for locale in list(locales): |
124 prefix = locale.split('-')[0] | 127 prefix = locale.split('-')[0] |
125 if prefix not in {'en', 'es', 'pt', 'zh'}: | 128 if prefix not in {'en', 'es', 'pt', 'zh'}: |
126 locales.remove(locale) | 129 locales.remove(locale) |
127 locales.add(prefix) | 130 locales.add(prefix) |
128 | 131 |
129 # Add languages with existing translations. | 132 # Add languages with existing translations. |
130 locales.update(localeConfig['locales']) | 133 locales.update(localeConfig['locales']) |
131 | 134 |
132 # Don't add the language we translate from as target translation. | 135 # Don't add the language we translate from as target translation. |
133 locales.remove(localeConfig['default_locale'].replace('_', '-')) | 136 locales.remove(localeConfig['default_locale'].replace('_', '-')) |
134 | 137 |
135 # Convert to locales understood by Crowdin. | 138 # Convert to locales understood by Crowdin. |
136 locales = {CROWDIN_LANG_MAPPING.get(locale, locale) for locale in locales} | 139 locales = {CROWDIN_LANG_MAPPING.get(locale, locale) for locale in locales} |
137 allowed = {locale['crowdin_code'] for locale in | 140 allowed = {locale['crowdin_code'] for locale in |
138 crowdin_request(projectName, 'supported-languages', key)} | 141 crowdin_request(projectName, 'supported-languages', key)} |
tlucas (2017/10/04 11:48:39): Note: result of rebasing
139 if not allowed.issuperset(locales): | 142 if not allowed.issuperset(locales): |
140 print "Warning, following locales aren't allowed by server: " + ', '.join(locales - allowed) | 143 print "Warning, following locales aren't allowed by server: " + ', '.join(locales - allowed) |
141 | 144 |
142 locales = sorted(locales & allowed) | 145 locales = sorted(locales & allowed) |
tlucas (2017/10/04 11:48:40): Note: result of rebasing
143 params = urllib.urlencode([('languages[]', locale) for locale in locales]) | 146 params = urllib.urlencode([('languages[]', locale) for locale in locales]) |
144 crowdin_request(projectName, 'edit-project', key, post_data=params) | 147 crowdin_request(projectName, 'edit-project', key, post_data=params) |
145 | 148 |
146 | 149 |
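The dialect-collapsing step and the repeated languages[] form field are the two non-obvious parts of setupTranslations above; a small self-contained illustration with a hypothetical input set (Python 2, as in the script):

```python
import urllib

# Hypothetical input, for illustration only.
locales = {'de-DE', 'en-GB', 'fr', 'pt-BR'}

# Collapse dialects except for English, Spanish, Portuguese and Chinese,
# mirroring the loop in setupTranslations.
for locale in list(locales):
    prefix = locale.split('-')[0]
    if prefix not in {'en', 'es', 'pt', 'zh'}:
        locales.remove(locale)
        locales.add(prefix)

print(sorted(locales))  # ['de', 'en-GB', 'fr', 'pt-BR']

# The repeated languages[] field sent to the edit-project action:
print(urllib.urlencode([('languages[]', locale) for locale in sorted(locales)]))
# languages%5B%5D=de&languages%5B%5D=en-GB&languages%5B%5D=fr&languages%5B%5D=pt-BR
```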
147 def crowdin_prepare_upload(files): | 150 def crowdin_prepare_upload(files): |
148 """Create a post body and matching headers, which Crowdin can handle.""" | 151 """Create a post body and matching headers, which Crowdin can handle.""" |
149 boundary = '----------ThIs_Is_tHe_bouNdaRY_$' | 152 boundary = '----------ThIs_Is_tHe_bouNdaRY_$' |
150 body = '' | 153 body = '' |
151 for name, data in files: | 154 for name, data in files: |
152 body += ( | 155 body += ( |
(...skipping 68 matching lines...)
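The skipped body of crowdin_prepare_upload is not reproduced above. Purely as a sketch of the multipart/form-data shape such a body takes, assuming Crowdin's files[<name>] field convention and ignoring any MIME-type detection the real code may do (a simplified illustration, not the elided implementation):

```python
def prepare_upload_sketch(files):
    # Simplified illustration of a multipart/form-data body; not the
    # elided implementation from this review.
    boundary = '----------ThIs_Is_tHe_bouNdaRY_$'
    body = ''
    for name, data in files:
        body += '--' + boundary + '\r\n'
        body += ('Content-Disposition: form-data; name="files[{0}]"; '
                 'filename="{0}"\r\n'.format(name))
        body += 'Content-Type: application/octet-stream\r\n\r\n'
        body += data + '\r\n'
    body += '--' + boundary + '--\r\n'
    headers = {'Content-Type': 'multipart/form-data; boundary=' + boundary}
    return body, headers
```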
221 newName = file | 224 newName = file |
222 else: | 225 else: |
223 fileHandle = codecs.open(path, 'rb', encoding='utf-8') | 226 fileHandle = codecs.open(path, 'rb', encoding='utf-8') |
224 data = json.dumps({file: {'message': fileHandle.read()}}) | 227 data = json.dumps({file: {'message': fileHandle.read()}}) |
225 fileHandle.close() | 228 fileHandle.close() |
226 newName = file + '.json' | 229 newName = file + '.json' |
227 | 230 |
228 if data: | 231 if data: |
229 files.append((newName, data)) | 232 files.append((newName, data)) |
230 if len(files): | 233 if len(files): |
231 language = CROWDIN_LANG_MAPPING.get(locale, locale) | 234 language = CROWDIN_LANG_MAPPING.get(locale, locale) |
tlucas (2017/10/04 11:48:39): Note: result of rebasing
232 data, headers = crowdin_prepare_upload(files) | 235 data, headers = crowdin_prepare_upload(files) |
233 crowdin_request(projectName, 'upload-translation', key, | 236 crowdin_request(projectName, 'upload-translation', key, |
234 {'language': language}, post_data=data, | 237 {'language': language}, post_data=data, |
235 headers=headers) | 238 headers=headers) |
236 | 239 |
237 | 240 |
238 def getTranslations(localeConfig, projectName, key): | 241 def getTranslations(localeConfig, projectName, key): |
239 """Download all available translations from crowdin. | 242 """Download all available translations from crowdin. |
240 | 243 |
241 Trigger crowdin to build the available export, wait for crowdin to | 244 Trigger crowdin to build the available export, wait for crowdin to |
(...skipping 14 matching lines...)
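The skipped lines of getTranslations are not shown; going by the docstring, the flow is to trigger an export build and then fetch the resulting archive. A hedged sketch of what such a flow could look like with the crowdin_request helper, assuming the standard Crowdin v1 'export' and 'download/all.zip' actions (placeholder flow, not the elided code):

```python
from StringIO import StringIO
from zipfile import ZipFile


def download_archive_sketch(projectName, key):
    # Ask Crowdin to (re)build the translation export.
    crowdin_request(projectName, 'export', key)

    # Fetch the built archive; raw=True because the response is a zip
    # file rather than the usual JSON envelope.
    data = crowdin_request(projectName, 'download/all.zip', key, raw=True)
    return ZipFile(StringIO(data))
```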
256 continue | 259 continue |
257 | 260 |
258 dir, file = os.path.split(info.filename) | 261 dir, file = os.path.split(info.filename) |
259 if not re.match(r'^[\w\-]+$', dir) or dir == normalizedDefaultLocale: | 262 if not re.match(r'^[\w\-]+$', dir) or dir == normalizedDefaultLocale: |
260 continue | 263 continue |
261 if file.count('.') == 1: | 264 if file.count('.') == 1: |
262 origFile = file | 265 origFile = file |
263 else: | 266 else: |
264 origFile = os.path.splitext(file)[0] | 267 origFile = os.path.splitext(file)[0] |
265 | 268 |
266 for key, value in CROWDIN_LANG_MAPPING.iteritems(): | 269 for key, value in CROWDIN_LANG_MAPPING.iteritems(): |
tlucas (2017/10/04 11:48:39): Note: result of rebasing
267 if value == dir: | 270 if value == dir: |
268 dir = key | 271 dir = key |
269 | 272 |
270 data = zip.open(info.filename).read() | 273 data = zip.open(info.filename).read() |
271 if data == '[]': | 274 if data == '[]': |
272 continue | 275 continue |
273 | 276 |
274 if not dir in dirs: | 277 if not dir in dirs: |
275 dirs[dir] = set() | 278 dirs[dir] = set() |
276 dirs[dir].add(origFile) | 279 dirs[dir].add(origFile) |
(...skipping 13 matching lines...)
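The loop over CROWDIN_LANG_MAPPING above maps a Crowdin directory name (e.g. 'sv-SE') back to the short locale code ('sv'). Purely as an illustration of what that loop computes, not a proposed change, the same lookup via an inverted mapping:

```python
# Equivalent reverse lookup, for illustration only.
reverse_mapping = {value: key for key, value in CROWDIN_LANG_MAPPING.iteritems()}
dir = reverse_mapping.get(dir, dir)
```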
290 # Remove any extra files | 293 # Remove any extra files |
291 for dir, files in dirs.iteritems(): | 294 for dir, files in dirs.iteritems(): |
292 baseDir = os.path.join(localeConfig['base_path'], dir) | 295 baseDir = os.path.join(localeConfig['base_path'], dir) |
293 if not os.path.exists(baseDir): | 296 if not os.path.exists(baseDir): |
294 continue | 297 continue |
295 for file in os.listdir(baseDir): | 298 for file in os.listdir(baseDir): |
296 path = os.path.join(baseDir, file) | 299 path = os.path.join(baseDir, file) |
297 valid_extension = file.endswith('.json') | 300 valid_extension = file.endswith('.json') |
298 if os.path.isfile(path) and valid_extension and not file in files: | 301 if os.path.isfile(path) and valid_extension and not file in files: |
299 os.remove(path) | 302 os.remove(path) |
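For context, a hypothetical driver for these helpers might look like the following; the project name, API key and locale configuration are placeholders, not values from this review, and only the localeConfig keys visible in the diff are filled in:

```python
# Placeholder values; not taken from this review.
localeConfig = {
    'base_path': 'locale',
    'default_locale': 'en_US',
    'locales': ['de', 'fr'],
}

setupTranslations(localeConfig, 'example-project', 'EXAMPLE-API-KEY')
getTranslations(localeConfig, 'example-project', 'EXAMPLE-API-KEY')
```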