Rietveld Code Review Tool

Delta Between Two Patch Sets: cms/bin/translate.py

Issue 29317015: Issue 2625 - [cms] Crowdin synchronisation script (Closed)
Left Patch Set: "Addressed Sebastian's feedback", created July 14, 2015, 12:50 p.m.
Right Patch Set: "Give query_params a default value", created July 16, 2015, 12:47 p.m.

Diff between the two patch sets (lines starting with "-" appear only in the left patch set, lines starting with "+" only in the right; unmarked lines are unchanged):
# coding: utf-8

# This file is part of the Adblock Plus web scripts,
# Copyright (C) 2006-2015 Eyeo GmbH
#
# Adblock Plus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# Adblock Plus is distributed in the hope that it will be useful,
(...skipping 23 matching lines...)
logger = logging.getLogger("cms.bin.translate")

class CrowdinAPI:
  FILES_PER_REQUEST = 20

  def __init__(self, api_key, project_name):
    self.api_key = api_key
    self.project_name = project_name
    self.connection = urllib3.connection_from_url("https://api.crowdin.com/")

+  def raw_request(self, request_method, api_endpoint, query_params=(), **kwargs):
+    url = "/api/project/%s/%s?%s" % (
+      urllib.quote(self.project_name),
+      urllib.quote(api_endpoint),
+      urllib.urlencode((("key", self.api_key),) + query_params)
+    )
+    try:
+      response = self.connection.request(
+        request_method, str(url), **kwargs
+      )
+    except urllib3.exceptions.HTTPError:
+      logger.error("Connection to API endpoint %s failed", url)
+      raise
+    if response.status < 200 or response.status >= 300:
+      logger.error("API call to %s failed:\n%s", url, response.data)
+      raise urllib3.exceptions.HTTPError(response.status)
+    return response
+
  def request(self, request_method, api_endpoint, data=None, files=None):
-    url = "/api/project/%s/%s?%s" % (
-      urllib.quote(self.project_name), urllib.quote(api_endpoint),
-      urllib.urlencode([("key", self.api_key), ("json", "1")])
-    )
-
    fields = []
    if data:
      for name, value in data.iteritems():
        if isinstance(value, basestring):
          fields.append((name, value))
        else:
          fields.extend((name + "[]", v) for v in value)
    if files:
      fields.extend(("files[%s]" % f[0], f) for f in files)

+    response = self.raw_request(
+      request_method, api_endpoint, (("json", "1"),),
+      fields=fields, preload_content=False
+    )
+
    try:
-      response = self.connection.request(
-        request_method, str(url), fields=fields,
-        timeout=urllib3.Timeout(connect=5)
-      )
-      if response.status < 200 or response.status >= 300:
-        raise urllib3.exceptions.HTTPError(response.status)
-    except urllib3.exceptions.HTTPError:
-      logger.error("API call to %s failed:\n%s", url, response.data)
-      raise
-
-    try:
-      return json.loads(response.data)
+      return json.load(response)
    except ValueError:
      logger.error("Invalid response returned by API endpoint %s", url)
      raise

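To make the refactoring easier to follow, here is a brief usage sketch; it is illustrative only and not part of the patch. It assumes a real API key and project name and only uses endpoints that already appear in this review ("export" and "download/all.zip").

# Illustrative sketch, not part of the patch; the key and project name below
# are placeholders.
from cms.bin.translate import CrowdinAPI

crowdin_api = CrowdinAPI("0123456789abcdef", "example-project")

# request() adds json=1 via raw_request() and returns the parsed JSON body.
result = crowdin_api.request("GET", "export")

# raw_request() returns the raw urllib3 response, useful for binary payloads.
response = crowdin_api.raw_request("GET", "download/all.zip")
archive_bytes = response.data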
def grouper(iterable, n):
  iterator = iter(iterable)
  while True:
    chunk = tuple(itertools.islice(iterator, n))
    if not chunk:
(...skipping 97 matching lines...)

def update_existing_files(crowdin_api, existing_files, page_strings):
  add_update_files(crowdin_api, "update-file", "Updating %d existing pages...",
                   existing_files, page_strings)

def upload_translations(crowdin_api, source_dir, new_files, required_locales):
  def open_locale_files(locale, files):
    for file_name in files:
      path = os.path.join(source_dir, "locales", locale, file_name)
      if os.path.isfile(path):
-        with open(path, "r") as f:
+        with open(path, "rb") as f:
Sebastian Noack 2015/07/14 14:39:28 Nit: mode="rb" for compatibility with Windows.
kzar 2015/07/15 09:51:24 Done.
          yield (file_name, f.read(), "application/json")

  if new_files:
    for locale in required_locales:
      for files in grouper(open_locale_files(locale, new_files),
                           crowdin_api.FILES_PER_REQUEST):
        logger.info("Uploading %d existing translation "
                    "files for locale %s...", len(files), locale)
        crowdin_api.request("POST", "upload-translation", files=files,
                            data={"language": locale})

def remove_old_files(crowdin_api, old_files):
  for file_name in old_files:
    logger.info("Removing old file %s", file_name)
    crowdin_api.request("POST", "delete-file", data={"file": file_name})

def remove_old_directories(crowdin_api, old_directories):
  for directory in reversed(sorted(old_directories, key=len)):
    logger.info("Removing old directory %s", directory)
    crowdin_api.request("POST", "delete-directory", data={"name": directory})
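A short aside on the ordering above: sorting the directories by path length and iterating in reverse removes the deepest paths first, so a child directory is always deleted before its parent. Illustrative values only:

# Illustrative only, not part of the patch; directory names are made up.
old_directories = ["pages", "pages/de", "pages/de/docs"]
for directory in reversed(sorted(old_directories, key=len)):
  print directory  # prints pages/de/docs, then pages/de, then pages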

def download_translations(crowdin_api, source_dir, required_locales):
  logger.info("Requesting generation of fresh translations archive...")
  result = crowdin_api.request("GET", "export")
  if result.get("success", {}).get("status") == "skipped":
    logger.warning("Archive generation skipped, either "
                   "no changes or API usage excessive")

  logger.info("Downloading translations archive...")
-  response = crowdin_api.connection.request(
-    "GET",
-    "/api/project/%s/download/all.zip?%s" % (
-      urllib.quote(crowdin_api.project_name),
-      urllib.urlencode([("key", crowdin_api.api_key)])
-    ), preload_content = False
-  )
-  if response.status < 200 or response.status >= 300:
-    raise urllib3.exceptions.HTTPError(response.status, response.data)
+  response = crowdin_api.raw_request("GET", "download/all.zip")

Sebastian Noack 2015/07/14 14:39:28 The logic above is duplicated in CrowdinAPI.request.
kzar 2015/07/15 09:51:25 Done.
  logger.info("Extracting translations archive...")
  with zipfile.ZipFile(io.BytesIO(response.data), "r") as archive:
    locale_path = os.path.join(source_dir, "locales")
    # First clear existing translation files
    for root, dirs, files in os.walk(locale_path, topdown=True):
      if root == locale_path:
        dirs[:] = [d for d in dirs if d in required_locales]
      for f in files:
-        if f.endswith(".json"):
+        if f.lower().endswith(".json"):
          os.remove(os.path.join(root, f))
    # Then extract the new ones in place
    for member in archive.namelist():
      path, file_name = posixpath.split(member)
      ext = posixpath.splitext(file_name)[1]
      locale = path.split(posixpath.sep)[0]
-      if ext == ".json" and locale in required_locales:
+      if ext.lower() == ".json" and locale in required_locales:
Sebastian Noack 2015/07/14 14:39:29 Nit: I think we should match the file extension case-insensitively.
kzar 2015/07/15 09:51:25 Done.
        archive.extract(member, locale_path)

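Regarding the case-insensitivity nit above, a minimal illustration of the new behaviour; the archive member names are made up:

# Illustrative only, not part of the patch.
import posixpath

for member in ("de/foo.json", "fr/bar.JSON"):
  ext = posixpath.splitext(member)[1]
  assert ext.lower() == ".json"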
def crowdin_sync(source_dir, crowdin_api_key):
  with FileSource(source_dir) as source:
    config = source.read_config()
    defaultlocale = config.get("general", "defaultlocale")
    crowdin_project_name = config.get("general", "crowdin-project-name")

    crowdin_api = CrowdinAPI(crowdin_api_key, crowdin_project_name)

(...skipping 36 matching lines...)
if __name__ == "__main__":
  if len(sys.argv) < 3:
    print >>sys.stderr, "Usage: python -m cms.bin.translate www_directory crowdin_project_api_key [logging_level]"
    sys.exit(1)

  logging.basicConfig()
  logger.setLevel(sys.argv[3] if len(sys.argv) > 3 else logging.INFO)

  source_dir, crowdin_api_key = sys.argv[1:3]
  crowdin_sync(source_dir, crowdin_api_key)
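For reference, an example invocation matching the usage message above; the directory and API key are placeholders, and the optional third argument is handed to Logger.setLevel(), so a level name like DEBUG should work as well:

python -m cms.bin.translate www_directory crowdin_project_api_key DEBUG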
