
Side by Side Diff: ensure_dependencies.py

Issue 29321227: Issue 2735 - Use ensure_dependencies.py in adblockplusie (Closed)
Patch Set: Created June 30, 2015, 12:43 a.m.
#!/usr/bin/env python
# coding: utf-8

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys
import os
import posixpath
import re
import io
import errno
import logging
import subprocess
import urlparse
import argparse

from collections import OrderedDict
from ConfigParser import RawConfigParser

USAGE = """
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Check out elemhidehelper repository into extensions/elemhidehelper directory
  # at tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Check out buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
"""

SKIP_DEPENDENCY_UPDATES = os.environ.get(
  "SKIP_DEPENDENCY_UPDATES", ""
).lower() not in ("", "0", "false")

Felix Dahlke 2015/07/02 10:02:06: This is a newer version of the script than what's …
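# SKIP_DEPENDENCY_UPDATES: any value other than "", "0" or "false"
# (case-insensitive) enables the skip, e.g. SKIP_DEPENDENCY_UPDATES=1 makes
# ensure_repo() and update_repo() below leave checkouts untouched.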

class Mercurial():
  def istype(self, repodir):
    return os.path.exists(os.path.join(repodir, ".hg"))

  def clone(self, source, target):
    if not source.endswith("/"):
      source += "/"
    subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])

  def get_revision_id(self, repo, rev=None):
    command = ["hg", "id", "--repository", repo, "--id"]
    if rev:
      command.extend(["--rev", rev])

    # Ignore stderr output and return code here: if revision lookup failed we
    # should simply return an empty string.
    result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
    return result.strip()

  def pull(self, repo):
    subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"])

  def update(self, repo, rev):
    subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev])

  def ignore(self, target, repo):

    if not self.istype(target):

      config_path = os.path.join(repo, ".hg", "hgrc")
      ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies"))

      config = RawConfigParser()
      config.read(config_path)

      if not config.has_section("ui"):
        config.add_section("ui")

      config.set("ui", "ignore.dependencies", ignore_path)
      with open(config_path, "w") as stream:
        config.write(stream)

      module = os.path.relpath(target, repo)
      _ensure_line_exists(ignore_path, module)

  def postprocess_url(self, url):
    return url

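# Note: Mercurial.ignore() above registers an extra ignore file in the parent
# repository's .hg/hgrc and lists each dependency path in it, e.g.:
#
#   [ui]
#   ignore.dependencies = /path/to/parentrepo/.hg/dependencies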
class Git():
  def istype(self, repodir):
    return os.path.exists(os.path.join(repodir, ".git"))

  def clone(self, source, target):
    source = source.rstrip("/")
    if not source.endswith(".git"):
      source += ".git"
    subprocess.check_call(["git", "clone", "--quiet", source, target])

  def get_revision_id(self, repo, rev="HEAD"):
    command = ["git", "rev-parse", "--revs-only", rev + '^{commit}']
    return subprocess.check_output(command, cwd=repo).strip()

  def pull(self, repo):
    # Fetch tracked branches, new tags and the list of available remote branches
    subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
    # Next we need to ensure all remote branches are tracked
    newly_tracked = False
    remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
    for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
      remote, local = match.groups()
      with open(os.devnull, "wb") as devnull:
        if subprocess.call(["git", "branch", "--track", local, remote],
                           cwd=repo, stdout=devnull, stderr=devnull) == 0:
          newly_tracked = True
    # Finally fetch any newly tracked remote branches
    if newly_tracked:
      subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)

  def update(self, repo, rev):
    subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo)

  def ignore(self, target, repo):
    module = os.path.relpath(target, repo)
    exclude_file = os.path.join(repo, ".git", "info", "exclude")
    _ensure_line_exists(exclude_file, module)

  def postprocess_url(self, url):
    # Handle alternative syntax of SSH URLS
    if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
      return "ssh://" + url.replace(":", "/", 1)
    return url

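# Note: Git.postprocess_url() above rewrites SCP-like SSH remotes, e.g.
# "git@github.com:adblockplus/buildtools" becomes
# "ssh://git@github.com/adblockplus/buildtools".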
repo_types = OrderedDict((
  ("hg", Mercurial()),
  ("git", Git()),
))

def parse_spec(path, line):
  if "=" not in line:
    logging.warning("Invalid line in file %s: %s" % (path, line))
    return None, None

  key, value = line.split("=", 1)
  key = key.strip()
  items = value.split()
  if not len(items):
    logging.warning("No value specified for key %s in file %s" % (key, path))
    return key, None

  result = OrderedDict()
  if not key.startswith("_"):
    result["_source"] = items.pop(0)

  for item in items:
    if ":" in item:
      type, value = item.split(":", 1)
    else:
      type, value = ("*", item)
    if type in result:
      logging.warning("Ignoring duplicate value for type %s (key %s in file %s)" % (type, key, path))
    else:
      result[type] = value
  return key, result

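# For example, the dependencies line
#   buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
# is parsed into ("buildtools", OrderedDict([("_source", "buildtools"),
# ("hg", "016d16f7137b"), ("git", "f3f8692f82e5")])).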
def read_deps(repodir):
  result = {}
  deps_path = os.path.join(repodir, "dependencies")
  try:
    with io.open(deps_path, "rt", encoding="utf-8") as handle:
      for line in handle:
        # Remove comments and whitespace
        line = re.sub(r"#.*", "", line).strip()
        if not line:
          continue

        key, spec = parse_spec(deps_path, line)
        if spec:
          result[key] = spec
    return result
  except IOError, e:
    if e.errno != errno.ENOENT:
      raise
    return None

def safe_join(path, subpath):
  # This has been inspired by Flask's safe_join() function
  forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
  if any(sep in subpath for sep in forbidden):
    raise Exception("Illegal directory separator in dependency path %s" % subpath)

  normpath = posixpath.normpath(subpath)
  if posixpath.isabs(normpath):
    raise Exception("Dependency path %s cannot be absolute" % subpath)
  if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
    raise Exception("Dependency path %s has to be inside the repository" % subpath)
  return os.path.join(path, *normpath.split(posixpath.sep))

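# safe_join() only accepts relative, forward-slash paths that stay inside the
# repository: e.g. "buildtools" or "libadblockplus/third_party" pass, while
# "/etc/passwd", "../elsewhere" or backslash-separated paths raise an exception.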
def get_repo_type(repo):
  for name, repotype in repo_types.iteritems():
    if repotype.istype(repo):
      return name
  return None

def ensure_repo(parentrepo, target, roots, sourcename):
  if os.path.exists(target):
    return

  if SKIP_DEPENDENCY_UPDATES:
    logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
                    "%s not cloned", target)
    return

  parenttype = get_repo_type(parentrepo)
  type = None
  for key in roots:
    if key == parenttype or (key in repo_types and type is None):
      type = key
  if type is None:
    raise Exception("No valid source found to create %s" % target)

  postprocess_url = repo_types[type].postprocess_url
  root = postprocess_url(roots[type])
  sourcename = postprocess_url(sourcename)

  if os.path.exists(root):
    url = os.path.join(root, sourcename)
  else:
    url = urlparse.urljoin(root, sourcename)

  logging.info("Cloning repository %s into %s" % (url, target))
  repo_types[type].clone(url, target)

  for repo in repo_types.itervalues():
    if repo.istype(parentrepo):
      repo.ignore(target, parentrepo)

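# With the _root values from the USAGE example above, a Mercurial parent
# repository resolves the dependency source "buildtools" via
# urlparse.urljoin("https://hg.adblockplus.org/", "buildtools"), i.e. it clones
# https://hg.adblockplus.org/buildtools; a Git parent uses the git: root instead.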
def update_repo(target, revisions):
  type = get_repo_type(target)
  if type is None:
    logging.warning("Type of repository %s unknown, skipping update" % target)
    return

  if type in revisions:
    revision = revisions[type]
  elif "*" in revisions:
    revision = revisions["*"]
  else:
    logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type))
    return

  resolved_revision = repo_types[type].get_revision_id(target, revision)
  current_revision = repo_types[type].get_revision_id(target)

  if resolved_revision != current_revision:
    if SKIP_DEPENDENCY_UPDATES:
      logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
                      "%s not checked out to %s", target, revision)
      return

    if not resolved_revision:
      logging.info("Revision %s is unknown, downloading remote changes" % revision)
      repo_types[type].pull(target)
      resolved_revision = repo_types[type].get_revision_id(target, revision)
      if not resolved_revision:
        raise Exception("Failed to resolve revision %s" % revision)

    logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
    repo_types[type].update(target, resolved_revision)

def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
  config = read_deps(repodir)
  if config is None:
    if level == 0:
      logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
    return
  if level >= 10:
    logging.warning("Too much subrepository nesting, ignoring %s" % repodir)
    return

  if overrideroots is not None:
    config["_root"] = overrideroots

  for dir, revisions in config.iteritems():
    if dir.startswith("_") or revisions["_source"] in skipdependencies:
      continue
    target = safe_join(repodir, dir)
    ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"])
    update_repo(target, revisions)
    resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroots, skipdependencies=skipdependencies)

  if self_update and "_self" in config and "*" in config["_self"]:
    source = safe_join(repodir, config["_self"]["*"])
    try:
      with io.open(source, "rb") as handle:
        sourcedata = handle.read()
    except IOError, e:
      if e.errno != errno.ENOENT:
        raise
      logging.warning("File %s doesn't exist, skipping self-update" % source)
      return

    target = __file__
    with io.open(target, "rb") as handle:
      targetdata = handle.read()

    if sourcedata != targetdata:
      logging.info("Updating %s from %s, don't forget to commit" % (source, target))
      with io.open(target, "wb") as handle:
        handle.write(sourcedata)
      if __name__ == "__main__":
        logging.info("Restarting %s" % target)
        os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
      else:
        logging.warning("Cannot restart %s automatically, please rerun" % target)

def _ensure_line_exists(path, pattern):
  with open(path, 'a+') as f:
    file_content = [l.strip() for l in f.readlines()]
    if not pattern in file_content:
      file_content.append(pattern)
      f.seek(0, os.SEEK_SET)
      f.truncate()
      for l in file_content:
        print >>f, l

if __name__ == "__main__":
  logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

  parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.")
  parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path")
  parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output")
  args = parser.parse_args()

  if args.quiet:
    logging.disable(logging.INFO)

  repos = args.repos
  if not len(repos):
    repos = [os.path.dirname(__file__)]
  for repo in repos:
    resolve_deps(repo)
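For reference, a short sketch of how the command-line interface defined above is invoked (the repository path is a placeholder):

  # Update dependencies of the repository this script lives in:
  python ensure_dependencies.py

  # Update one or more specific repositories, suppressing informational output:
  python ensure_dependencies.py path/to/adblockplusie --quiet

  # Leave existing checkouts untouched and skip cloning missing dependencies:
  SKIP_DEPENDENCY_UPDATES=1 python ensure_dependencies.py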
