Left: | ||
Right: |
LEFT | RIGHT |
---|---|
1 # coding: utf-8 | 1 # coding: utf-8 |
2 | 2 |
3 # This file is part of the Adblock Plus web scripts, | 3 # This file is part of the Adblock Plus web scripts, |
4 # Copyright (C) 2006-2015 Eyeo GmbH | 4 # Copyright (C) 2006-2015 Eyeo GmbH |
5 # | 5 # |
6 # Adblock Plus is free software: you can redistribute it and/or modify | 6 # Adblock Plus is free software: you can redistribute it and/or modify |
7 # it under the terms of the GNU General Public License version 3 as | 7 # it under the terms of the GNU General Public License version 3 as |
8 # published by the Free Software Foundation. | 8 # published by the Free Software Foundation. |
9 # | 9 # |
10 # Adblock Plus is distributed in the hope that it will be useful, | 10 # Adblock Plus is distributed in the hope that it will be useful, |
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of | 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of |
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
13 # GNU General Public License for more details. | 13 # GNU General Public License for more details. |
14 # | 14 # |
15 # You should have received a copy of the GNU General Public License | 15 # You should have received a copy of the GNU General Public License |
16 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. | 16 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. |
17 | 17 |
18 import collections | 18 import collections |
19 import io | 19 import io |
20 import itertools | 20 import itertools |
21 import json | 21 import json |
22 import logging | 22 import logging |
23 import os | 23 import os |
24 import posixpath | 24 import posixpath |
25 import sys | 25 import sys |
26 import urllib | |
26 import zipfile | 27 import zipfile |
27 | 28 |
28 import urllib3 | 29 import urllib3 |
29 | 30 |
30 import cms.utils | 31 import cms.utils |
31 from cms.sources import FileSource | 32 from cms.sources import FileSource |
32 | 33 |
33 logger = logging.getLogger("cms.bin.translate") | 34 logger = logging.getLogger("cms.bin.translate") |
34 | 35 |
35 class CrowdinAPI: | 36 class CrowdinAPI: |
36 FILES_PER_REQUEST = 20 | 37 FILES_PER_REQUEST = 20 |
37 | 38 |
38 def __init__(self, api_key, project_name): | 39 def __init__(self, api_key, project_name): |
39 self.api_key = api_key | 40 self.api_key = api_key |
40 self.project_name = project_name | 41 self.project_name = project_name |
41 self.connection = urllib3.connection_from_url("https://api.crowdin.com/") | 42 self.connection = urllib3.connection_from_url("https://api.crowdin.com/") |
42 | 43 |
44 def raw_request(self, request_method, api_endpoint, query_params=(), **kwargs) : | |
45 url = "/api/project/%s/%s?%s" % ( | |
46 urllib.quote(self.project_name), | |
47 urllib.quote(api_endpoint), | |
48 urllib.urlencode((("key", self.api_key),) + query_params) | |
49 ) | |
50 try: | |
51 response = self.connection.request( | |
52 request_method, str(url), **kwargs | |
53 ) | |
54 except urllib3.exceptions.HTTPError: | |
55 logger.error("Connection to API endpoint %s failed", url) | |
56 raise | |
57 if response.status < 200 or response.status >= 300: | |
58 logger.error("API call to %s failed:\n%s", url, response.data) | |
59 raise urllib3.exceptions.HTTPError(response.status) | |
60 return response | |
61 | |
43 def request(self, request_method, api_endpoint, data=None, files=None): | 62 def request(self, request_method, api_endpoint, data=None, files=None): |
44 url = "/api/project/%s/%s?key=%s&json=1" % ( | |
45 self.project_name, api_endpoint, self.api_key | |
Sebastian Noack
2015/07/14 11:31:05
Please encode the parameters properly:
url = "/ap
kzar
2015/07/14 12:54:27
Done.
| |
46 ) | |
47 | |
48 fields = [] | 63 fields = [] |
49 if data: | 64 if data: |
50 for name, value in data.iteritems(): | 65 for name, value in data.iteritems(): |
51 if isinstance(value, basestring): | 66 if isinstance(value, basestring): |
52 fields.append((name, value)) | 67 fields.append((name, value)) |
53 else: | 68 else: |
54 fields += [(name + "[]", v) for v in value] | 69 fields.extend((name + "[]", v) for v in value) |
Sebastian Noack
2015/07/14 11:31:05
Nit: fields.extend((name + "[]", v) for v in value
kzar
2015/07/14 12:54:27
Done.
| |
55 if files: | 70 if files: |
56 fields += [("files[%s]" % f[0], f) for f in files] | 71 fields.extend(("files[%s]" % f[0], f) for f in files) |
Sebastian Noack
2015/07/14 11:31:04
Note that |'%s' % f| returns its object representat
Sebastian Noack
2015/07/14 11:31:05
Nit: Please use .extend() here as well.
kzar
2015/07/14 12:54:27
So f[0] is actually the file name and we need to p
kzar
2015/07/14 12:54:30
Done.
| |
72 | |
73 response = self.raw_request( | |
74 request_method, api_endpoint, (("json", "1"),), | |
75 fields=fields, preload_content=False | |
76 ) | |
57 | 77 |
58 try: | 78 try: |
59 response = self.connection.request( | 79 return json.load(response) |
60 request_method, str(url), fields=fields, | |
61 timeout=urllib3.Timeout(connect=5) | |
Sebastian Noack
2015/07/14 11:31:07
Any particular reason you specify a custom connect
kzar
2015/07/14 12:54:30
During testing I found that by default it didn't s
Sebastian Noack
2015/07/14 14:39:27
But differently than urllib/urllib2 which we use e
kzar
2015/07/15 09:51:24
Done.
| |
62 ) | |
63 if response.status < 200 or response.status >= 300: | |
Sebastian Noack
2015/07/14 11:31:07
How about |response.status not in xrange(200, 299)
kzar
2015/07/14 12:54:28
I think I prefer it as is.
| |
64 raise urllib3.exceptions.HTTPError(response.status) | |
65 except urllib3.exceptions.HTTPError as e: | |
Sebastian Noack
2015/07/14 11:31:06
Nit: Since we don't use the variable e you can omi
kzar
2015/07/14 12:54:28
Done.
| |
66 logger.error("API call to %s failed:\n%s" % (url, response.data)) | |
Sebastian Noack
2015/07/14 11:31:05
You can pass the values for the placeholders direc
kzar
2015/07/14 12:54:30
Done.
| |
67 raise | |
68 | |
69 try: | |
70 return json.loads(response.data) | |
Sebastian Noack
2015/07/14 11:31:04
How about |json.load(response)|?
Sebastian Noack
2015/07/14 11:31:06
Note that like urllib/urllib2, urllib3's response
kzar
2015/07/14 12:54:28
This doesn't work as you would expect, even though
Sebastian Noack
2015/07/14 14:39:27
I just tested it myself. And it did work. You have
Sebastian Noack
2015/07/14 14:39:28
For reference, I just realized that urllib3, magic
kzar
2015/07/15 09:51:23
Acknowledged.
kzar
2015/07/15 09:51:24
You're right, I forgot to set preload_content when
| |
71 except ValueError: | 80 except ValueError: |
72 logger.error("Invalid response returned by API endpoint %s" % url) | 81 logger.error("Invalid response returned by API endpoint %s", url) |
Sebastian Noack
2015/07/14 11:31:05
Same here: logger.error("Invalid response returned
kzar
2015/07/14 12:54:30
Done.
| |
73 raise | 82 raise |
74 | 83 |
75 | 84 |
76 def grouper(iterable, n): | 85 def grouper(iterable, n): |
77 iterator = iter(iterable) | 86 iterator = iter(iterable) |
78 while True: | 87 while True: |
79 chunk = tuple(itertools.islice(iterator, n)) | 88 chunk = tuple(itertools.islice(iterator, n)) |
80 if not chunk: | 89 if not chunk: |
81 break | 90 break |
82 yield chunk | 91 yield chunk |
(...skipping 27 matching lines...) Expand all Loading... | |
110 | 119 |
111 def configure_locales(crowdin_api, required_locales, enabled_locales, | 120 def configure_locales(crowdin_api, required_locales, enabled_locales, |
112 defaultlocale): | 121 defaultlocale): |
113 logger.info("Checking which locales are supported by Crowdin...") | 122 logger.info("Checking which locales are supported by Crowdin...") |
114 response = crowdin_api.request("GET", "supported-languages") | 123 response = crowdin_api.request("GET", "supported-languages") |
115 | 124 |
116 supported_locales = {l["crowdin_code"] for l in response} | 125 supported_locales = {l["crowdin_code"] for l in response} |
117 skipped_locales = required_locales - supported_locales | 126 skipped_locales = required_locales - supported_locales |
118 | 127 |
119 if skipped_locales: | 128 if skipped_locales: |
120 logger.warning("Ignoring locales that Crowdin doesn't support: %s" % ( | 129 logger.warning("Ignoring locales that Crowdin doesn't support: %s", |
121 ", ".join(skipped_locales) | 130 ", ".join(skipped_locales)) |
122 )) | |
123 required_locales -= skipped_locales | 131 required_locales -= skipped_locales |
124 | 132 |
125 if not required_locales.issubset(enabled_locales): | 133 if not required_locales.issubset(enabled_locales): |
126 logger.info("Enabling the required locales for the Crowdin project...") | 134 logger.info("Enabling the required locales for the Crowdin project...") |
127 crowdin_api.request( | 135 crowdin_api.request( |
128 "POST", "edit-project", | 136 "POST", "edit-project", |
129 data={"languages": list(enabled_locales | required_locales)} | 137 data={"languages": enabled_locales | required_locales} |
Sebastian Noack
2015/07/14 11:31:05
Since .request() merely iterates over the value (i
kzar
2015/07/14 12:54:30
Done.
| |
130 ) | 138 ) |
131 | 139 |
132 return required_locales | 140 return required_locales |
133 | 141 |
134 def list_remote_files(project_info): | 142 def list_remote_files(project_info): |
135 def parse_file_node(node, path=""): | 143 def parse_file_node(node, path=""): |
136 if node["node_type"] == "file": | 144 if node["node_type"] == "file": |
137 remote_files.add(path + node["name"]) | 145 remote_files.add(path + node["name"]) |
138 elif node["node_type"] == "directory": | 146 elif node["node_type"] == "directory": |
139 dir_name = path + node["name"] | 147 dir_name = path + node["name"] |
(...skipping 13 matching lines...) Expand all Loading... | |
153 for page, strings in page_strings.iteritems(): | 161 for page, strings in page_strings.iteritems(): |
154 if strings: | 162 if strings: |
155 local_files.add(page + ".json") | 163 local_files.add(page + ".json") |
156 while "/" in page: | 164 while "/" in page: |
157 page = page.rsplit("/", 1)[0] | 165 page = page.rsplit("/", 1)[0] |
158 local_directories.add(page) | 166 local_directories.add(page) |
159 return local_files, local_directories | 167 return local_files, local_directories |
160 | 168 |
161 def create_directories(crowdin_api, directories): | 169 def create_directories(crowdin_api, directories): |
162 for directory in directories: | 170 for directory in directories: |
163 logger.info("Creating directory %s" % directory) | 171 logger.info("Creating directory %s", directory) |
164 crowdin_api.request("POST", "add-directory", data={"name": directory}) | 172 crowdin_api.request("POST", "add-directory", data={"name": directory}) |
165 | 173 |
166 def add_update_files(crowdin_api, api_endpoint, message, files, page_strings): | 174 def add_update_files(crowdin_api, api_endpoint, message, files, page_strings): |
167 for group in grouper(files, crowdin_api.FILES_PER_REQUEST): | 175 for group in grouper(files, crowdin_api.FILES_PER_REQUEST): |
168 files = [] | 176 files = [] |
169 for file_name in group: | 177 for file_name in group: |
170 page = os.path.splitext(file_name)[0] | 178 page = os.path.splitext(file_name)[0] |
171 files.append((file_name, json.dumps(page_strings[page]), "application/json ")) | 179 files.append((file_name, json.dumps(page_strings[page]), "application/json ")) |
172 del page_strings[page] | 180 del page_strings[page] |
173 logger.info(message % len(files)) | 181 logger.info(message, len(files)) |
174 crowdin_api.request("POST", api_endpoint, files=files) | 182 crowdin_api.request("POST", api_endpoint, files=files) |
175 | 183 |
176 def upload_new_files(crowdin_api, new_files, page_strings): | 184 def upload_new_files(crowdin_api, new_files, page_strings): |
177 add_update_files(crowdin_api, "add-file", "Uploading %d new pages...", | 185 add_update_files(crowdin_api, "add-file", "Uploading %d new pages...", |
178 new_files, page_strings) | 186 new_files, page_strings) |
179 | 187 |
180 def update_existing_files(crowdin_api, existing_files, page_strings): | 188 def update_existing_files(crowdin_api, existing_files, page_strings): |
181 add_update_files(crowdin_api, "update-file", "Updating %d existing pages...", | 189 add_update_files(crowdin_api, "update-file", "Updating %d existing pages...", |
182 existing_files, page_strings) | 190 existing_files, page_strings) |
183 | 191 |
184 def upload_translations(crowdin_api, source_dir, new_files, required_locales): | 192 def upload_translations(crowdin_api, source_dir, new_files, required_locales): |
185 def open_locale_files(locale, files): | 193 def open_locale_files(locale, files): |
186 for file_name in files: | 194 for file_name in files: |
187 path = os.path.join(source_dir, "locales", locale, file_name) | 195 path = os.path.join(source_dir, "locales", locale, file_name) |
188 if os.path.isfile(path): | 196 if os.path.isfile(path): |
189 with open(path, "r") as f: | 197 with open(path, "rb") as f: |
190 yield (file_name, f.read(), "application/json") | 198 yield (file_name, f.read(), "application/json") |
191 | 199 |
192 if new_files: | 200 if new_files: |
193 for locale in required_locales: | 201 for locale in required_locales: |
194 for files in grouper(open_locale_files(locale, new_files), | 202 for files in grouper(open_locale_files(locale, new_files), |
195 crowdin_api.FILES_PER_REQUEST): | 203 crowdin_api.FILES_PER_REQUEST): |
196 logger.info("Uploading %d existing translation " | 204 logger.info("Uploading %d existing translation " |
197 "files for locale %s..." % (len(files), locale)) | 205 "files for locale %s...", len(files), locale) |
198 crowdin_api.request("POST", "upload-translation", files=files, | 206 crowdin_api.request("POST", "upload-translation", files=files, |
199 data={"language": locale}) | 207 data={"language": locale}) |
200 | 208 |
201 def remove_old_files(crowdin_api, old_files): | 209 def remove_old_files(crowdin_api, old_files): |
202 for file_name in old_files: | 210 for file_name in old_files: |
203 logger.info("Removing old file %s" % file_name) | 211 logger.info("Removing old file %s", file_name) |
204 crowdin_api.request("POST", "delete-file", data={"file": file_name}) | 212 crowdin_api.request("POST", "delete-file", data={"file": file_name}) |
205 | 213 |
206 def remove_old_directories(crowdin_api, old_directories): | 214 def remove_old_directories(crowdin_api, old_directories): |
207 for directory in reversed(sorted(old_directories, key=len)): | 215 for directory in reversed(sorted(old_directories, key=len)): |
208 logger.info("Removing old directory %s" % directory) | 216 logger.info("Removing old directory %s", directory) |
209 crowdin_api.request("POST", "delete-directory", data={"name": directory}) | 217 crowdin_api.request("POST", "delete-directory", data={"name": directory}) |
210 | 218 |
211 def download_translations(crowdin_api, source_dir, required_locales): | 219 def download_translations(crowdin_api, source_dir, required_locales): |
212 logger.info("Requesting generation of fresh translations archive...") | 220 logger.info("Requesting generation of fresh translations archive...") |
213 result = crowdin_api.request("GET", "export") | 221 result = crowdin_api.request("GET", "export") |
214 if result.get("success", {}).get("status") == "skipped": | 222 if result.get("success", {}).get("status") == "skipped": |
215 logger.warning("Archive generation skipped, either " | 223 logger.warning("Archive generation skipped, either " |
216 "no changes or API usage excessive") | 224 "no changes or API usage excessive") |
217 | 225 |
218 logger.info("Downloading translations archive...") | 226 logger.info("Downloading translations archive...") |
219 response = crowdin_api.connection.request( | 227 response = crowdin_api.raw_request("GET", "download/all.zip") |
220 "GET", | |
221 "/api/project/%s/download/all.zip?key=%s" % ( | |
Sebastian Noack
2015/07/14 11:31:07
As above, please use urllib.quote and urllib.urlen
kzar
2015/07/14 12:54:27
Done.
| |
222 crowdin_api.project_name, crowdin_api.api_key | |
223 ), preload_content = False | |
224 ) | |
225 if response.status < 200 or response.status >= 300: | |
Sebastian Noack
2015/07/14 11:31:05
How about |response.status not in xrange(200, 299)
kzar
2015/07/14 12:54:28
See above, I prefer it as is.
| |
226 raise urllib3.exceptions.HTTPError(response.status, response.data) | |
227 | 228 |
228 logger.info("Extracting translations archive...") | 229 logger.info("Extracting translations archive...") |
229 with zipfile.ZipFile(io.BytesIO(response.data), "r") as archive: | 230 with zipfile.ZipFile(io.BytesIO(response.data), "r") as archive: |
Sebastian Noack
2015/07/14 11:31:04
The response is a file-like object by itself. So i
kzar
2015/07/14 12:54:28
I agree this _should_ work but in practice it just
Sebastian Noack
2015/07/14 14:39:28
Ah right, ZipFile() requires a file-like object th
kzar
2015/07/15 09:51:24
Glad one of us understands :p
| |
230 locale_path = os.path.join(source_dir, "locales") | 231 locale_path = os.path.join(source_dir, "locales") |
231 # First clear existing translation files | 232 # First clear existing translation files |
232 for root, dirs, files in os.walk(locale_path, topdown=True): | 233 for root, dirs, files in os.walk(locale_path, topdown=True): |
233 if root == locale_path: | 234 if root == locale_path: |
234 dirs[:] = [d for d in dirs if d in required_locales] | 235 dirs[:] = [d for d in dirs if d in required_locales] |
235 for f in files: | 236 for f in files: |
236 if f.endswith(".json"): | 237 if f.lower().endswith(".json"): |
237 os.remove(os.path.join(root, f)) | 238 os.remove(os.path.join(root, f)) |
238 # Then extract the new ones in place | 239 # Then extract the new ones in place |
239 for member in archive.namelist(): | 240 for member in archive.namelist(): |
240 path, file_name = posixpath.split(member) | 241 path, file_name = posixpath.split(member) |
241 ext = posixpath.splitext(file_name)[1] | 242 ext = posixpath.splitext(file_name)[1] |
242 locale = path.split(posixpath.sep)[0] | 243 locale = path.split(posixpath.sep)[0] |
243 if ext == ".json" and locale in required_locales: | 244 if ext.lower() == ".json" and locale in required_locales: |
244 archive.extract(member, locale_path) | 245 archive.extract(member, locale_path) |
245 | 246 |
246 def crowdin_sync(source_dir, crowdin_api_key): | 247 def crowdin_sync(source_dir, crowdin_api_key): |
247 with FileSource(source_dir) as source: | 248 with FileSource(source_dir) as source: |
248 config = source.read_config() | 249 config = source.read_config() |
249 defaultlocale = config.get("general", "defaultlocale") | 250 defaultlocale = config.get("general", "defaultlocale") |
250 crowdin_project_name = config.get("general", "crowdin-project-name") | 251 crowdin_project_name = config.get("general", "crowdin-project-name") |
251 | 252 |
252 crowdin_api = CrowdinAPI(crowdin_api_key, crowdin_project_name) | 253 crowdin_api = CrowdinAPI(crowdin_api_key, crowdin_project_name) |
253 | 254 |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
290 if __name__ == "__main__": | 291 if __name__ == "__main__": |
291 if len(sys.argv) < 3: | 292 if len(sys.argv) < 3: |
292 print >>sys.stderr, "Usage: python -m cms.bin.translate www_directory crowdi n_project_api_key [logging_level]" | 293 print >>sys.stderr, "Usage: python -m cms.bin.translate www_directory crowdi n_project_api_key [logging_level]" |
293 sys.exit(1) | 294 sys.exit(1) |
294 | 295 |
295 logging.basicConfig() | 296 logging.basicConfig() |
296 logger.setLevel(sys.argv[3] if len(sys.argv) > 3 else logging.INFO) | 297 logger.setLevel(sys.argv[3] if len(sys.argv) > 3 else logging.INFO) |
297 | 298 |
298 source_dir, crowdin_api_key = sys.argv[1:3] | 299 source_dir, crowdin_api_key = sys.argv[1:3] |
299 crowdin_sync(source_dir, crowdin_api_key) | 300 crowdin_sync(source_dir, crowdin_api_key) |
LEFT | RIGHT |