Rietveld Code Review Tool

Side by Side Diff: ensure_dependencies.py

Issue 29336327: Issue 2834 - Enable translations for adblockplusui (Closed)
Patch Set: Add link to Crowdin (created Feb. 15, 2016, 4:50 p.m.)
1 #!/usr/bin/env python
2 # coding: utf-8
3
4 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7
8 import sys
9 import os
10 import posixpath
11 import re
12 import io
13 import errno
14 import logging
15 import subprocess
16 import urlparse
17 import argparse
18
19 from collections import OrderedDict
20 from ConfigParser import RawConfigParser
21
22 USAGE = """
23 A dependencies file should look like this:
24
25 # VCS-specific root URLs for the repositories
26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
27 # File to update this script from (optional)
28 _self = buildtools/ensure_dependencies.py
29 # Clone elemhidehelper repository into extensions/elemhidehelper directory at
30 # tag "1.2".
31 extensions/elemhidehelper = elemhidehelper 1.2
32 # Clone buildtools repository into buildtools directory at VCS-specific
33 # revision IDs.
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
35 # Clone the adblockplus repository into adblockplus directory, overwriting the
36 # usual source URL for Git repository and specifying VCS specific revision IDs.
37 adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
38 # Clone the adblockpluschrome repository into the adblockpluschrome directory,
39 # from a specific Git repository, specifying the revision ID.
40 adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
41 """
42
43 SKIP_DEPENDENCY_UPDATES = os.environ.get(
44 "SKIP_DEPENDENCY_UPDATES", ""
45 ).lower() not in ("", "0", "false")
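# For example, running "SKIP_DEPENDENCY_UPDATES=1 ./ensure_dependencies.py"
# (any value other than "", "0" or "false" counts as set) makes the script
# warn instead of cloning missing dependencies or updating existing ones; see
# ensure_repo() and update_repo() below.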
46
47 class Mercurial():
48 def istype(self, repodir):
49 return os.path.exists(os.path.join(repodir, ".hg"))
50
51 def clone(self, source, target):
52 if not source.endswith("/"):
53 source += "/"
54 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])
55
56 def get_revision_id(self, repo, rev=None):
57 command = ["hg", "id", "--repository", repo, "--id"]
58 if rev:
59 command.extend(["--rev", rev])
60
61 # Ignore stderr output and return code here: if revision lookup failed we
62 # should simply return an empty string.
63 result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
64 return result.strip()
65
66 def pull(self, repo):
67 subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"])
68
69 def update(self, repo, rev, revname):
70 subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev])
71
72 def ignore(self, target, repo):
73
74 if not self.istype(target):
75
76 config_path = os.path.join(repo, ".hg", "hgrc")
77 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies"))
78
79 config = RawConfigParser()
80 config.read(config_path)
81
82 if not config.has_section("ui"):
83 config.add_section("ui")
84
85 config.set("ui", "ignore.dependencies", ignore_path)
86 with open(config_path, "w") as stream:
87 config.write(stream)
88
89 module = os.path.relpath(target, repo)
90 _ensure_line_exists(ignore_path, module)
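# For example, ignoring a "buildtools" checkout inside a parent Mercurial
# repository adds "ignore.dependencies = <parent>/.hg/dependencies" to the
# [ui] section of the parent's .hg/hgrc and appends the line "buildtools" to
# that dependencies file.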
91
92 def postprocess_url(self, url):
93 return url
94
95 class Git():
96 def istype(self, repodir):
97 return os.path.exists(os.path.join(repodir, ".git"))
98
99 def clone(self, source, target):
100 source = source.rstrip("/")
101 if not source.endswith(".git"):
102 source += ".git"
103 subprocess.check_call(["git", "clone", "--quiet", source, target])
104
105 def get_revision_id(self, repo, rev="HEAD"):
106 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}']
107 return subprocess.check_output(command, cwd=repo).strip()
108
109 def pull(self, repo):
110 # Fetch tracked branches, new tags and the list of available remote branches
111 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
112 # Next we need to ensure all remote branches are tracked
113 newly_tracked = False
114 remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
115 for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
116 remote, local = match.groups()
117 with open(os.devnull, "wb") as devnull:
118 if subprocess.call(["git", "branch", "--track", local, remote],
119 cwd=repo, stdout=devnull, stderr=devnull) == 0:
120 newly_tracked = True
121 # Finally fetch any newly tracked remote branches
122 if newly_tracked:
123 subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)
124
125 def update(self, repo, rev, revname):
126 subprocess.check_call(["git", "checkout", "--quiet", revname], cwd=repo)
127
128 def ignore(self, target, repo):
129 module = os.path.sep + os.path.relpath(target, repo)
130 exclude_file = os.path.join(repo, ".git", "info", "exclude")
131 _ensure_line_exists(exclude_file, module)
132
133 def postprocess_url(self, url):
134 # Handle alternative syntax of SSH URLS
135 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
136 return "ssh://" + url.replace(":", "/", 1)
137 return url
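# For example, the SCP-like URL "git@github.com:user/adblockpluschrome.git"
# from the USAGE example has no URL scheme, so it is rewritten to
# "ssh://git@github.com/user/adblockpluschrome.git".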
138
139 repo_types = OrderedDict((
140 ("hg", Mercurial()),
141 ("git", Git()),
142 ))
143
144 # [vcs:]value
145 item_regexp = re.compile(
146 "^(?:(" + "|".join(map(re.escape, repo_types.keys())) +"):)?"
147 "(.+)$"
148 )
149
150 # [url@]rev
151 source_regexp = re.compile(
152 "^(?:(.*)@)?"
153 "(.+)$"
154 )
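# For example, for the USAGE entry
# "buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5", item_regexp
# splits the item "hg:016d16f7137b" into ("hg", "016d16f7137b"), and
# source_regexp splits a value such as "repo_url@1.2" into ("repo_url", "1.2");
# a value without "@" yields (None, value).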
155
156 def merge_seqs(seq1, seq2):
157 """Return a list of any truthy values from the suplied sequences
158
159 (None, 2), (1,) => [1, 2]
160 None, (1, 2) => [1, 2]
161 (1, 2), (3, 4) => [3, 4]
162 """
163 return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ())
164
165 def parse_spec(path, line):
166 if "=" not in line:
167 logging.warning("Invalid line in file %s: %s" % (path, line))
168 return None, None
169
170 key, value = line.split("=", 1)
171 key = key.strip()
172 items = value.split()
173 if not len(items):
174 logging.warning("No value specified for key %s in file %s" % (key, path))
175 return key, None
176
177 result = OrderedDict()
178 is_dependency_field = not key.startswith("_")
179
180 for i, item in enumerate(items):
181 try:
182 vcs, value = re.search(item_regexp, item).groups()
183 vcs = vcs or "*"
184 if is_dependency_field:
185 if i == 0 and vcs == "*":
186 # In order to be backwards compatible we have to assume that the first
187 # source contains only a URL/path for the repo if it does not contain
188 # the VCS part
189 url_rev = (value, None)
190 else:
191 url_rev = re.search(source_regexp, value).groups()
192 result[vcs] = merge_seqs(result.get(vcs), url_rev)
193 else:
194 if vcs in result:
195 logging.warning("Ignoring duplicate value for type %r "
196 "(key %r in file %r)" % (vcs, key, path))
197 result[vcs] = value
198 except AttributeError:
199 logging.warning("Ignoring invalid item %r for type %r "
200 "(key %r in file %r)" % (item, vcs, key, path))
201 continue
202 return key, result
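# For example, parsing the USAGE line
# "buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5" returns
# ("buildtools", OrderedDict([("*", ["buildtools", None]),
#                             ("hg", [None, "016d16f7137b"]),
#                             ("git", [None, "f3f8692f82e5"])]))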
203
204 def read_deps(repodir):
205 result = {}
206 deps_path = os.path.join(repodir, "dependencies")
207 try:
208 with io.open(deps_path, "rt", encoding="utf-8") as handle:
209 for line in handle:
210 # Remove comments and whitespace
211 line = re.sub(r"#.*", "", line).strip()
212 if not line:
213 continue
214
215 key, spec = parse_spec(deps_path, line)
216 if spec:
217 result[key] = spec
218 return result
219 except IOError, e:
220 if e.errno != errno.ENOENT:
221 raise
222 return None
223
224 def safe_join(path, subpath):
225 # This has been inspired by Flask's safe_join() function
226 forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
227 if any(sep in subpath for sep in forbidden):
228 raise Exception("Illegal directory separator in dependency path %s" % subpath)
229
230 normpath = posixpath.normpath(subpath)
231 if posixpath.isabs(normpath):
232 raise Exception("Dependency path %s cannot be absolute" % subpath)
233 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
234 raise Exception("Dependency path %s has to be inside the repository" % subpath)
235 return os.path.join(path, *normpath.split(posixpath.sep))
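# For example, safe_join("/work/adblockplus", "buildtools") yields
# "/work/adblockplus/buildtools", while subpaths like "../other" or
# "/etc/passwd" raise an exception because they would escape the repository
# ("/work/adblockplus" is just an illustrative path).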
236
237 def get_repo_type(repo):
238 for name, repotype in repo_types.iteritems():
239 if repotype.istype(repo):
240 return name
241 return None
242
243 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
244 if os.path.exists(target):
245 return
246
247 if SKIP_DEPENDENCY_UPDATES:
248 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
249 "%s not cloned", target)
250 return
251
252 postprocess_url = repo_types[type].postprocess_url
253 root = postprocess_url(root)
254 sourcename = postprocess_url(sourcename)
255
256 if os.path.exists(root):
257 url = os.path.join(root, sourcename)
258 else:
259 url = urlparse.urljoin(root, sourcename)
260
261 logging.info("Cloning repository %s into %s" % (url, target))
262 repo_types[type].clone(url, target)
263 repo_types[parenttype].ignore(target, parentrepo)
264
265 def update_repo(target, type, revision):
266 resolved_revision = repo_types[type].get_revision_id(target, revision)
267 current_revision = repo_types[type].get_revision_id(target)
268
269 if resolved_revision != current_revision:
270 if SKIP_DEPENDENCY_UPDATES:
271 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
272 "%s not checked out to %s", target, revision)
273 return
274
275 if not resolved_revision:
276 logging.info("Revision %s is unknown, downloading remote changes" % revision)
277 repo_types[type].pull(target)
278 resolved_revision = repo_types[type].get_revision_id(target, revision)
279 if not resolved_revision:
280 raise Exception("Failed to resolve revision %s" % revision)
281
282 logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
283 repo_types[type].update(target, resolved_revision, revision)
284
285 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
286 config = read_deps(repodir)
287 if config is None:
288 if level == 0:
289 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
290 return
291 if level >= 10:
292 logging.warning("Too much subrepository nesting, ignoring %s" % repodir)
293 return
294
295 if overrideroots is not None:
296 config["_root"] = overrideroots
297
298 for dir, sources in config.iteritems():
299 if (dir.startswith("_") or
300 skipdependencies.intersection([s[0] for s in sources if s[0]])):
301 continue
302
303 target = safe_join(repodir, dir)
304 parenttype = get_repo_type(repodir)
305 _root = config.get("_root", {})
306
307 for key in sources.keys() + _root.keys():
308 if key == parenttype or key is None and vcs != "*":
309 vcs = key
310 source, rev = merge_seqs(sources.get("*"), sources.get(vcs))
311
312 if not (vcs and source and rev):
313 logging.warning("No valid source / revision found to create %s" % target)
314 continue
315
316 ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
317 update_repo(target, vcs, rev)
318 resolve_deps(target, level + 1, self_update=False,
319 overrideroots=overrideroots, skipdependencies=skipdependencies)
320
321 if self_update and "_self" in config and "*" in config["_self"]:
322 source = safe_join(repodir, config["_self"]["*"])
323 try:
324 with io.open(source, "rb") as handle:
325 sourcedata = handle.read()
326 except IOError, e:
327 if e.errno != errno.ENOENT:
328 raise
329 logging.warning("File %s doesn't exist, skipping self-update" % source)
330 return
331
332 target = __file__
333 with io.open(target, "rb") as handle:
334 targetdata = handle.read()
335
336 if sourcedata != targetdata:
337 logging.info("Updating %s from %s, don't forget to commit" % (source, target))
338 with io.open(target, "wb") as handle:
339 handle.write(sourcedata)
340 if __name__ == "__main__":
341 logging.info("Restarting %s" % target)
342 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
343 else:
344 logging.warning("Cannot restart %s automatically, please rerun" % target)
345
346 def _ensure_line_exists(path, pattern):
347 with open(path, 'a+') as f:
348 file_content = [l.strip() for l in f.readlines()]
349 if not pattern in file_content:
350 file_content.append(pattern)
351 f.seek(0, os.SEEK_SET)
352 f.truncate()
353 for l in file_content:
354 print >>f, l
355
356 if __name__ == "__main__":
357 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
358
359 parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.")
360 parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path")
361 parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output")
362 args = parser.parse_args()
363
364 if args.quiet:
365 logging.disable(logging.INFO)
366
367 repos = args.repos
368 if not len(repos):
369 repos = [os.path.dirname(__file__)]
370 for repo in repos:
371 resolve_deps(repo)
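# Illustrative invocations: "./ensure_dependencies.py" processes the
# repository containing this script, while "./ensure_dependencies.py -q
# /path/to/repo" suppresses informational output and processes the given
# repository instead.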