LEFT | RIGHT |
---|---|
1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
4 | 4 |
5 import re | 5 import re |
6 import os | 6 import os |
7 import sys | 7 import sys |
8 import codecs | 8 import codecs |
9 import json | 9 import json |
10 import urlparse | 10 import urlparse |
11 import urllib | 11 import urllib |
12 import urllib2 | 12 import urllib2 |
13 import mimetypes | 13 import mimetypes |
14 from StringIO import StringIO | 14 from StringIO import StringIO |
15 from ConfigParser import SafeConfigParser | 15 from ConfigParser import SafeConfigParser |
16 from zipfile import ZipFile | 16 from zipfile import ZipFile |
17 from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS | 17 from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS |
18 | 18 |
19 langMappingGecko = { | 19 CROWDIN_LANG_MAPPING = { |
20 'bn-BD': 'bn', | |
21 'br': 'br-FR', | |
22 'dsb': 'dsb-DE', | |
23 'fj-FJ': 'fj', | |
24 'hsb': 'hsb-DE', | |
25 'hi-IN': 'hi', | |
26 'ml': 'ml-IN', | |
27 'nb-NO': 'nb', | |
28 'rm': 'rm-CH', | |
29 'ta-LK': 'ta', | |
30 'wo-SN': 'wo', | |
31 } | |
32 | |
33 langMappingChrome = { | |
34 'br': 'br-FR', | 20 'br': 'br-FR', |
35 'dsb': 'dsb-DE', | 21 'dsb': 'dsb-DE', |
36 'es': 'es-ES', | 22 'es': 'es-ES', |
37 'fur': 'fur-IT', | 23 'fur': 'fur-IT', |
38 'fy': 'fy-NL', | 24 'fy': 'fy-NL', |
39 'ga': 'ga-IE', | 25 'ga': 'ga-IE', |
40 'gu': 'gu-IN', | 26 'gu': 'gu-IN', |
41 'hsb': 'hsb-DE', | 27 'hsb': 'hsb-DE', |
42 'hy': 'hy-AM', | 28 'hy': 'hy-AM', |
43 'ml': 'ml-IN', | 29 'ml': 'ml-IN', |
44 'nn': 'nn-NO', | 30 'nn': 'nn-NO', |
45 'pa': 'pa-IN', | 31 'pa': 'pa-IN', |
46 'rm': 'rm-CH', | 32 'rm': 'rm-CH', |
47 'si': 'si-LK', | 33 'si': 'si-LK', |
48 'sv': 'sv-SE', | 34 'sv': 'sv-SE', |
49 'ur': 'ur-PK', | 35 'ur': 'ur-PK', |
50 } | 36 } |
51 | 37 |
52 chromeLocales = [ | |
53 'am', | |
54 'ar', | |
55 'bg', | |
56 'bn', | |
57 'ca', | |
58 'cs', | |
59 'da', | |
60 'de', | |
61 'el', | |
62 'en-GB', | |
63 'en-US', | |
64 'es-419', | |
65 'es', | |
66 'et', | |
67 'fa', | |
68 'fi', | |
69 'fil', | |
70 'fr', | |
71 'gu', | |
72 'he', | |
73 'hi', | |
74 'hr', | |
75 'hu', | |
76 'id', | |
77 'it', | |
78 'ja', | |
79 'kn', | |
80 'ko', | |
81 'lt', | |
82 'lv', | |
83 'ml', | |
84 'mr', | |
85 'ms', | |
86 'nb', | |
87 'nl', | |
88 'pl', | |
89 'pt-BR', | |
90 'pt-PT', | |
91 'ro', | |
92 'ru', | |
93 'sk', | |
94 'sl', | |
95 'sr', | |
96 'sv', | |
97 'sw', | |
98 'ta', | |
99 'te', | |
100 'th', | |
101 'tr', | |
102 'uk', | |
103 'vi', | |
104 'zh-CN', | |
105 'zh-TW', | |
106 ] | |
107 | |
108 CROWDIN_AP_URL = 'https://api.crowdin.com/api/project' | 38 CROWDIN_AP_URL = 'https://api.crowdin.com/api/project' |
39 FIREFOX_RELEASES_URL = 'http://www.mozilla.org/en-US/firefox/all.html' | |
40 FIREFOX_LP_URL = 'https://addons.mozilla.org/en-US/firefox/language-tools/' | |
41 CHROMIUM_DEB_URL = 'https://packages.debian.org/sid/all/chromium-l10n/filelist' | |
109 | 42 |
110 | 43 |
111 def crowdin_request(project_name, action, key, get={}, post_data=None, | 44 def crowdin_request(project_name, action, key, get={}, post_data=None, |
112 headers={}, raw=False): | 45 headers={}, raw=False): |
113 """Perform a call to crowdin and raise an Exception on failure.""" | 46 """Perform a call to crowdin and raise an Exception on failure.""" |
114 request = urllib2.Request( | 47 request = urllib2.Request( |
115 '{}/{}/{}?{}'.format(CROWDIN_AP_URL, | 48 '{}/{}/{}?{}'.format(CROWDIN_AP_URL, |
116 urllib.quote(project_name), | 49 urllib.quote(project_name), |
117 urllib.quote(action), | 50 urllib.quote(action), |
118 urllib.urlencode(dict(get, key=key, json=1))), | 51 urllib.urlencode(dict(get, key=key, json=1))), |
119 post_data, | 52 post_data, |
120 headers, | 53 headers, |
121 ) | 54 ) |
122 | 55 |
123 try: | 56 try: |
124 result = urllib2.urlopen(request).read() | 57 result = urllib2.urlopen(request).read() |
125 except urllib2.HTTPError as e: | 58 except urllib2.HTTPError as e: |
126 raise Exception('Server returned HTTP Error {}:\n{}'.format(e.code, | 59 raise Exception('Server returned HTTP Error {}:\n{}'.format(e.code, |
127 e.read())) | 60 e.read())) |
128 | 61 |
129 if not raw: | 62 if not raw: |
130 return json.loads(result) | 63 return json.loads(result) |
131 | 64 |
132 return result | 65 return result |
133 | 66 |
134 | 67 |
135 class OrderedDict(dict): | |
136 def __init__(self): | |
137 self.__order = [] | |
138 | |
139 def __setitem__(self, key, value): | |
140 self.__order.append(key) | |
141 dict.__setitem__(self, key, value) | |
142 | |
143 def iteritems(self): | |
144 done = set() | |
145 for key in self.__order: | |
146 if not key in done and key in self: | |
147 yield (key, self[key]) | |
148 done.add(key) | |
149 | |
150 | |
151 def escapeEntity(value): | |
152 return value.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;') | |
153 | |
154 | |
155 def unescapeEntity(value): | |
156 return value.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"') | |
157 | |
158 | |
159 def mapLocale(type, locale): | |
160 mapping = langMappingChrome if type == 'ISO-15897' else langMappingGecko | |
161 return mapping.get(locale, locale) | |
162 | |
163 | |
164 def escapeProperty(value): | |
165 return value.replace('\n', '\\n') | |
166 | |
167 | |
168 def unescapeProperty(value): | |
169 return value.replace('\\n', '\n') | |
170 | |
171 | |
172 def parsePropertiesString(data, path): | |
173 currentComment = None | |
174 for line in data.splitlines(): | |
175 match = re.search(r'^\s*[#!]\s*(.*)', line) | |
176 if match: | |
177 currentComment = match.group(1) | |
178 elif '=' in line: | |
179 key, value = line.split('=', 1) | |
180 yield (unescapeProperty(key), currentComment, unescapeProperty(value)) | |
181 currentComment = None | |
182 elif re.search(r'\S', line): | |
183 print >>sys.stderr, 'Unrecognized data in file %s: %s' % (path, line) | |
184 | |
185 | |
186 def parseString(data, path): | |
187 result = {'_origData': data} | |
188 if path.endswith('.properties'): | |
    [Sebastian Noack, 2017/10/03 02:22:39] Both, the .dtd and the .properties format are spec
    [tlucas, 2017/10/04 11:48:38] Done.
189 it = parsePropertiesString(data, path) | |
190 else: | |
191 return None | |
192 | |
193 for name, comment, value in it: | |
194 result[name] = value | |
195 return result | |
196 | |
197 | |
198 def readFile(path): | |
199 fileHandle = codecs.open(path, 'rb', encoding='utf-8') | |
200 data = fileHandle.read() | |
201 fileHandle.close() | |
202 return parseString(data, path) | |
203 | |
204 | |
205 def toJSON(path): | |
206 fileHandle = codecs.open(path, 'rb', encoding='utf-8') | |
207 data = fileHandle.read() | |
208 fileHandle.close() | |
209 | |
210 if path.endswith('.properties'): | |
211 it = parsePropertiesString(data, path) | |
212 else: | |
213 return None | |
214 | |
215 result = OrderedDict() | |
216 for name, comment, value in it: | |
217 obj = {'message': value} | |
218 if comment == None: | |
219 obj['description'] = name | |
220 else: | |
221 obj['description'] = '%s: %s' % (name, comment) | |
222 result[name] = obj | |
223 return json.dumps(result, ensure_ascii=False, indent=2) | |
224 | |
225 | |
226 def fromJSON(path, data): | |
227 data = json.loads(data) | |
228 if not data: | |
229 if os.path.exists(path): | |
230 os.remove(path) | |
231 return | |
232 | |
233 dir = os.path.dirname(path) | |
234 if not os.path.exists(dir): | |
235 os.makedirs(dir) | |
236 file = codecs.open(path, 'wb', encoding='utf-8') | |
237 for key, value in data.iteritems(): | |
238 file.write('{}={}\n'.format(escapeProperty(key), | |
239 escapeProperty(value['message']))) | |
240 file.close() | |
241 | |
242 | |
243 def preprocessChromeLocale(path, metadata, isMaster): | 68 def preprocessChromeLocale(path, metadata, isMaster): |
244 fileHandle = codecs.open(path, 'rb', encoding='utf-8') | 69 fileHandle = codecs.open(path, 'rb', encoding='utf-8') |
245 data = json.load(fileHandle) | 70 data = json.load(fileHandle) |
246 fileHandle.close() | 71 fileHandle.close() |
247 | 72 |
248 for key, value in data.iteritems(): | 73 for key, value in data.iteritems(): |
249 if isMaster: | 74 if isMaster: |
250 # Make sure the key name is listed in the description | 75 # Make sure the key name is listed in the description |
251 if 'description' in value: | 76 if 'description' in value: |
252 value['description'] = '%s: %s' % (key, value['description']) | 77 value['description'] = '%s: %s' % (key, value['description']) |
(...skipping 16 matching lines...) | |
269 for key, value in parsed.iteritems(): | 94 for key, value in parsed.iteritems(): |
270 if 'description' in value: | 95 if 'description' in value: |
271 del value['description'] | 96 del value['description'] |
272 | 97 |
273 file = codecs.open(path, 'wb', encoding='utf-8') | 98 file = codecs.open(path, 'wb', encoding='utf-8') |
274 json.dump(parsed, file, ensure_ascii=False, sort_keys=True, indent=2, separators=(',', ': ')) | 99 json.dump(parsed, file, ensure_ascii=False, sort_keys=True, indent=2, separators=(',', ': ')) |
275 file.close() | 100 file.close() |
276 | 101 |
277 | 102 |
278 def setupTranslations(localeConfig, projectName, key): | 103 def setupTranslations(localeConfig, projectName, key): |
279 # Make a new set from the locales list, mapping to Crowdin friendly format | 104 locales = set() |
280 locales = {mapLocale(localeConfig['name_format'], locale) | 105 |
281 for locale in localeConfig['locales']} | 106 # Languages supported by Firefox |
282 | 107 data = urllib2.urlopen(FIREFOX_RELEASES_URL).read() |
283 # Fill up with locales that we don't have but the browser supports | 108 for match in re.finditer(r'&lang=([\w\-]+)"', data): |
284 if 'chrome' in localeConfig['target_platforms']: | 109 locales.add(match.group(1)) |
285 for locale in chromeLocales: | 110 |
286 locales.add(mapLocale('ISO-15897', locale)) | 111 # Languages supported by Firefox Language Packs |
287 | 112 data = urllib2.urlopen(FIREFOX_LP_URL).read() |
288 if 'gecko-webext' in localeConfig['target_platforms']: | 113 for match in re.finditer(r'<tr>.*?</tr>', data, re.S): |
289 firefoxLocales = urllib2.urlopen('http://www.mozilla.org/en-US/firefox/all.html').read() | 114 if match.group(0).find('Install Language Pack') >= 0: |
290 for match in re.finditer(r'&lang=([\w\-]+)"', firefoxLocales): | 115 match2 = re.search(r'lang="([\w\-]+)"', match.group(0)) |
291 locales.add(mapLocale('BCP-47', match.group(1))) | 116 if match2: |
292 langPacks = urllib2.urlopen('https://addons.mozilla.org/en-US/firefox/language-tools/').read() | 117 locales.add(match2.group(1)) |
293 for match in re.finditer(r'<tr>.*?</tr>', langPacks, re.S): | 118 |
294 if match.group(0).find('Install Language Pack') >= 0: | 119 # Languages supported by Chrome (excluding es-419) |
295 match2 = re.search(r'lang="([\w\-]+)"', match.group(0)) | 120 data = urllib2.urlopen(CHROMIUM_DEB_URL).read() |
296 if match2: | 121 for match in re.finditer(r'locales/(?!es-419)([\w\-]+)\.pak', data): |
297 locales.add(mapLocale('BCP-47', match2.group(1))) | 122 locales.add(match.group(1)) |
298 | 123 |
299 allowed = set() | 124 # We don't translate individual dialects of languages |
300 allowedLocales = crowdin_request(projectName, 'supported-languages', key) | 125 # other than English, Spanish, Portuguese and Chinese. |
301 | 126 for locale in list(locales): |
302 for locale in allowedLocales: | 127 prefix = locale.split('-')[0] |
303 allowed.add(locale['crowdin_code']) | 128 if prefix not in {'en', 'es', 'pt', 'zh'}: |
129 locales.remove(locale) | |
130 locales.add(prefix) | |
131 | |
132 # Add languages with existing translations. | |
133 locales.update(localeConfig['locales']) | |
134 | |
135 # Don't add the language we translate from as target translation. | |
136 locales.remove(localeConfig['default_locale'].replace('_', '-')) | |
137 | |
138 # Convert to locales understood by Crowdin. | |
139 locales = {CROWDIN_LANG_MAPPING.get(locale, locale) for locale in locales} | |
140 allowed = {locale['crowdin_code'] for locale in | |
141 crowdin_request(projectName, 'supported-languages', key)} | |
304 if not allowed.issuperset(locales): | 142 if not allowed.issuperset(locales): |
305 print "Warning, following locales aren't allowed by server: " + ', '.joi n(locales - allowed) | 143 print "Warning, following locales aren't allowed by server: " + ', '.joi n(locales - allowed) |
306 | 144 |
307 locales = list(locales & allowed) | 145 locales = sorted(locales & allowed) |
308 locales.sort() | |
309 params = urllib.urlencode([('languages[]', locale) for locale in locales]) | 146 params = urllib.urlencode([('languages[]', locale) for locale in locales]) |
310 | |
311 crowdin_request(projectName, 'edit-project', key, post_data=params) | 147 crowdin_request(projectName, 'edit-project', key, post_data=params) |
312 | 148 |
313 | 149 |
314 def crowdin_prepare_upload(files): | 150 def crowdin_prepare_upload(files): |
315 """Create a post body and matching headers, which Crowdin can handle.""" | 151 """Create a post body and matching headers, which Crowdin can handle.""" |
316 boundary = '----------ThIs_Is_tHe_bouNdaRY_$' | 152 boundary = '----------ThIs_Is_tHe_bouNdaRY_$' |
317 body = '' | 153 body = '' |
318 for name, data in files: | 154 for name, data in files: |
319 body += ( | 155 body += ( |
320 '--{boundary}\r\n' | 156 '--{boundary}\r\n' |
(...skipping 19 matching lines...) | |
340 | 176 |
341 def updateTranslationMaster(localeConfig, metadata, dir, projectName, key): | 177 def updateTranslationMaster(localeConfig, metadata, dir, projectName, key): |
342 result = crowdin_request(projectName, 'info', key) | 178 result = crowdin_request(projectName, 'info', key) |
343 | 179 |
344 existing = set(map(lambda f: f['name'], result['files'])) | 180 existing = set(map(lambda f: f['name'], result['files'])) |
345 add = [] | 181 add = [] |
346 update = [] | 182 update = [] |
347 for file in os.listdir(dir): | 183 for file in os.listdir(dir): |
348 path = os.path.join(dir, file) | 184 path = os.path.join(dir, file) |
349 if os.path.isfile(path): | 185 if os.path.isfile(path): |
350 if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'): | 186 if file.endswith('.json'): |
351 data = preprocessChromeLocale(path, metadata, True) | 187 data = preprocessChromeLocale(path, metadata, True) |
352 newName = file | 188 newName = file |
353 elif localeConfig['file_format'] == 'chrome-json': | 189 else: |
354 fileHandle = codecs.open(path, 'rb', encoding='utf-8') | 190 fileHandle = codecs.open(path, 'rb', encoding='utf-8') |
355 data = json.dumps({file: {'message': fileHandle.read()}}) | 191 data = json.dumps({file: {'message': fileHandle.read()}}) |
356 fileHandle.close() | 192 fileHandle.close() |
357 newName = file + '.json' | |
358 else: | |
359 data = toJSON(path) | |
360 newName = file + '.json' | 193 newName = file + '.json' |
361 | 194 |
362 if data: | 195 if data: |
363 if newName in existing: | 196 if newName in existing: |
364 update.append((newName, data)) | 197 update.append((newName, data)) |
365 existing.remove(newName) | 198 existing.remove(newName) |
366 else: | 199 else: |
367 add.append((newName, data)) | 200 add.append((newName, data)) |
368 | 201 |
369 if len(add): | 202 if len(add): |
370 query = {'titles[{}]'.format(name): os.path.splitext(name)[0] | 203 query = {'titles[{}]'.format(name): os.path.splitext(name)[0] |
371 for name, _ in add} | 204 for name, _ in add} |
372 query['type'] = 'chrome' | 205 query['type'] = 'chrome' |
373 data, headers = crowdin_prepare_upload(add) | 206 data, headers = crowdin_prepare_upload(add) |
374 crowdin_request(projectName, 'add-file', key, query, post_data=data, | 207 crowdin_request(projectName, 'add-file', key, query, post_data=data, |
375 headers=headers) | 208 headers=headers) |
376 if len(update): | 209 if len(update): |
377 data, headers = crowdin_prepare_upload(update) | 210 data, headers = crowdin_prepare_upload(update) |
378 crowdin_request(projectName, 'update-file', key, post_data=data, | 211 crowdin_request(projectName, 'update-file', key, post_data=data, |
379 headers=headers) | 212 headers=headers) |
380 for file in existing: | 213 for file in existing: |
381 crowdin_request(projectName, 'delete-file', key, {'file': file}) | 214 crowdin_request(projectName, 'delete-file', key, {'file': file}) |
382 | 215 |
383 | 216 |
384 def uploadTranslations(localeConfig, metadata, dir, locale, projectName, key): | 217 def uploadTranslations(localeConfig, metadata, dir, locale, projectName, key): |
385 files = [] | 218 files = [] |
386 for file in os.listdir(dir): | 219 for file in os.listdir(dir): |
387 path = os.path.join(dir, file) | 220 path = os.path.join(dir, file) |
388 if os.path.isfile(path): | 221 if os.path.isfile(path): |
389 if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'): | 222 if file.endswith('.json'): |
390 data = preprocessChromeLocale(path, metadata, False) | 223 data = preprocessChromeLocale(path, metadata, False) |
391 newName = file | 224 newName = file |
392 elif localeConfig['file_format'] == 'chrome-json': | 225 else: |
393 fileHandle = codecs.open(path, 'rb', encoding='utf-8') | 226 fileHandle = codecs.open(path, 'rb', encoding='utf-8') |
394 data = json.dumps({file: {'message': fileHandle.read()}}) | 227 data = json.dumps({file: {'message': fileHandle.read()}}) |
395 fileHandle.close() | 228 fileHandle.close() |
396 newName = file + '.json' | 229 newName = file + '.json' |
397 else: | |
398 data = toJSON(path) | |
399 newName = file + '.json' | |
400 | 230 |
401 if data: | 231 if data: |
402 files.append((newName, data)) | 232 files.append((newName, data)) |
403 if len(files): | 233 if len(files): |
404 language = mapLocale(localeConfig['name_format'], locale) | 234 language = CROWDIN_LANG_MAPPING.get(locale, locale) |
405 data, headers = crowdin_prepare_upload(files) | 235 data, headers = crowdin_prepare_upload(files) |
406 crowdin_request(projectName, 'upload-translation', key, | 236 crowdin_request(projectName, 'upload-translation', key, |
407 {'language': language}, post_data=data, | 237 {'language': language}, post_data=data, |
408 headers=headers) | 238 headers=headers) |
409 | 239 |
410 | 240 |
411 def getTranslations(localeConfig, projectName, key): | 241 def getTranslations(localeConfig, projectName, key): |
412 """Download all available translations from crowdin. | 242 """Download all available translations from crowdin. |
413 | 243 |
414 Trigger crowdin to build the available export, wait for crowdin to | 244 Trigger crowdin to build the available export, wait for crowdin to |
415 finish the job and download the generated zip afterwards. | 245 finish the job and download the generated zip afterwards. |
416 """ | 246 """ |
417 crowdin_request(projectName, 'export', key) | 247 crowdin_request(projectName, 'export', key) |
418 | 248 |
419 result = crowdin_request(projectName, 'download/all.zip', key, raw=True) | 249 result = crowdin_request(projectName, 'download/all.zip', key, raw=True) |
420 zip = ZipFile(StringIO(result)) | 250 zip = ZipFile(StringIO(result)) |
421 dirs = {} | 251 dirs = {} |
422 | 252 |
423 normalizedDefaultLocale = localeConfig['default_locale'] | 253 normalizedDefaultLocale = localeConfig['default_locale'].replace('_', '-') |
424 if localeConfig['name_format'] == 'ISO-15897': | 254 normalizedDefaultLocale = CROWDIN_LANG_MAPPING.get(normalizedDefaultLocale, |
425 normalizedDefaultLocale = normalizedDefaultLocale.replace('_', '-') | 255 normalizedDefaultLocale) |
426 normalizedDefaultLocale = mapLocale(localeConfig['name_format'], | |
427 normalizedDefaultLocale) | |
428 | 256 |
429 for info in zip.infolist(): | 257 for info in zip.infolist(): |
430 if not info.filename.endswith('.json'): | 258 if not info.filename.endswith('.json'): |
431 continue | 259 continue |
432 | 260 |
433 dir, file = os.path.split(info.filename) | 261 dir, file = os.path.split(info.filename) |
434 if not re.match(r'^[\w\-]+$', dir) or dir == normalizedDefaultLocale: | 262 if not re.match(r'^[\w\-]+$', dir) or dir == normalizedDefaultLocale: |
435 continue | 263 continue |
436 if localeConfig['file_format'] == 'chrome-json' and file.count('.') == 1: | 264 if file.count('.') == 1: |
437 origFile = file | 265 origFile = file |
438 else: | 266 else: |
439 origFile = re.sub(r'\.json$', '', file) | 267 origFile = os.path.splitext(file)[0] |
440 | 268 |
441 if localeConfig['name_format'] == 'ISO-15897': | 269 for key, value in CROWDIN_LANG_MAPPING.iteritems(): |
442 mapping = langMappingChrome | |
443 else: | |
444 mapping = langMappingGecko | |
445 | |
446 for key, value in mapping.iteritems(): | |
447 if value == dir: | 270 if value == dir: |
448 dir = key | 271 dir = key |
449 if localeConfig['name_format'] == 'ISO-15897': | |
450 dir = dir.replace('-', '_') | |
451 | 272 |
452 data = zip.open(info.filename).read() | 273 data = zip.open(info.filename).read() |
453 if data == '[]': | 274 if data == '[]': |
454 continue | 275 continue |
455 | 276 |
456 if not dir in dirs: | 277 if not dir in dirs: |
457 dirs[dir] = set() | 278 dirs[dir] = set() |
458 dirs[dir].add(origFile) | 279 dirs[dir].add(origFile) |
459 | 280 |
460 path = os.path.join(localeConfig['base_path'], dir, origFile) | 281 path = os.path.join(localeConfig['base_path'], dir, origFile) |
461 if not os.path.exists(os.path.dirname(path)): | 282 if not os.path.exists(os.path.dirname(path)): |
462 os.makedirs(os.path.dirname(path)) | 283 os.makedirs(os.path.dirname(path)) |
463 if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'): | 284 if file.endswith('.json'): |
464 postprocessChromeLocale(path, data) | 285 postprocessChromeLocale(path, data) |
465 elif localeConfig['file_format'] == 'chrome-json': | 286 else: |
466 data = json.loads(data) | 287 data = json.loads(data) |
467 if origFile in data: | 288 if origFile in data: |
468 fileHandle = codecs.open(path, 'wb', encoding='utf-8') | 289 fileHandle = codecs.open(path, 'wb', encoding='utf-8') |
469 fileHandle.write(data[origFile]['message']) | 290 fileHandle.write(data[origFile]['message']) |
470 fileHandle.close() | 291 fileHandle.close() |
471 else: | |
472 fromJSON(path, data) | |
473 | 292 |
474 # Remove any extra files | 293 # Remove any extra files |
475 for dir, files in dirs.iteritems(): | 294 for dir, files in dirs.iteritems(): |
476 baseDir = os.path.join(localeConfig['base_path'], dir) | 295 baseDir = os.path.join(localeConfig['base_path'], dir) |
477 if not os.path.exists(baseDir): | 296 if not os.path.exists(baseDir): |
478 continue | 297 continue |
479 for file in os.listdir(baseDir): | 298 for file in os.listdir(baseDir): |
480 path = os.path.join(baseDir, file) | 299 path = os.path.join(baseDir, file) |
481 valid_extension = os.path.splitext(file)[1] in {'.json', | 300 valid_extension = file.endswith('.json') |
482 '.properties'} | |
483 if os.path.isfile(path) and valid_extension and not file in files: | 301 if os.path.isfile(path) and valid_extension and not file in files: |
484 os.remove(path) | 302 os.remove(path) |
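
For orientation, here is a minimal driver sketch (not part of the patch) showing how the entry points kept on the right-hand side fit together after this change. The shape of `localeConfig` is inferred from the keys the new code reads (`base_path`, `default_locale`, `locales`); the project name, API key and locale values are placeholders, not taken from this review.

```python
# Illustrative sketch only -- not part of the patch. localeConfig keys mirror
# what the refactored code reads; all concrete values below are placeholders.
if __name__ == '__main__':
    localeConfig = {
        'base_path': '_locales',            # directory with one subdirectory per locale
        'default_locale': 'en_US',          # language translations are made from
        'locales': ['de', 'fr', 'pt-BR'],   # locales that already have translations
    }
    project_name = 'example-project'        # placeholder Crowdin project name
    key = 'XXXXXXXXXXXXXXXX'                # placeholder Crowdin API key

    # Configure the target languages on Crowdin, then pull the current
    # translations back into base_path.
    setupTranslations(localeConfig, project_name, key)
    getTranslations(localeConfig, project_name, key)
```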