OLD | NEW |
---|---|
1 #!/usr/bin/env python | |
2 # coding: utf-8 | 1 # coding: utf-8 |
3 | 2 |
4 # This Source Code Form is subject to the terms of the Mozilla Public | 3 # This Source Code Form is subject to the terms of the Mozilla Public |
5 # License, v. 2.0. If a copy of the MPL was not distributed with this | 4 # License, v. 2.0. If a copy of the MPL was not distributed with this |
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
7 | 6 |
8 import sys | 7 import sys |
9 import os | 8 import os |
10 import posixpath | 9 import posixpath |
11 import re | 10 import re |
12 import io | 11 import io |
13 import errno | 12 import errno |
14 import logging | 13 import logging |
15 import subprocess | 14 import traceback |
16 import urlparse | 15 import urlparse |
17 import argparse | 16 import argparse |
18 | |
19 from collections import OrderedDict | 17 from collections import OrderedDict |
20 from ConfigParser import RawConfigParser | 18 from ConfigParser import RawConfigParser |
21 | 19 |
 | 20 from buildtools.vcs import repo_types |
 | 21 |
22 USAGE = """ | 22 USAGE = """ |
23 A dependencies file should look like this: | 23 A dependencies file should look like this: |
24 | 24 |
25 # VCS-specific root URLs for the repositories | 25 # VCS-specific root URLs for the repositories |
26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ | 26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ |
27 # File to update this script from (optional) | 27 # Enable self-updates |
28 _self = buildtools/ensure_dependencies.py | 28 _self = true |
 | 29 # Directory to be added to module search path when locating |
 | 30 # buildtools.ensure_dependencies module (optional, for self-update) |
 | 31 _module_path = subdir |
29 # Check out elemhidehelper repository into extensions/elemhidehelper directory | 32 # Check out elemhidehelper repository into extensions/elemhidehelper directory |
30 # at tag "1.2". | 33 # at tag "1.2". |
31 extensions/elemhidehelper = elemhidehelper 1.2 | 34 extensions/elemhidehelper = elemhidehelper 1.2 |
32 # Check out buildtools repository into buildtools directory at VCS-specific | 35 # Check out buildtools repository into buildtools directory at VCS-specific |
33 # revision IDs. | 36 # revision IDs. |
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 | 37 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 |
35 """ | 38 """ |
36 | 39 |
37 SKIP_DEPENDENCY_UPDATES = os.environ.get( | 40 SKIP_DEPENDENCY_UPDATES = os.environ.get( |
38 "SKIP_DEPENDENCY_UPDATES", "" | 41 "SKIP_DEPENDENCY_UPDATES", "" |
39 ).lower() not in ("", "0", "false") | 42 ).lower() not in ("", "0", "false") |
40 | 43 |
41 class Mercurial(): | |
42 def istype(self, repodir): | |
43 return os.path.exists(os.path.join(repodir, ".hg")) | |
44 | |
45 def clone(self, source, target): | |
46 if not source.endswith("/"): | |
47 source += "/" | |
48 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target]) | |
49 | |
50 def get_revision_id(self, repo, rev=None): | |
51 command = ["hg", "id", "--repository", repo, "--id"] | |
52 if rev: | |
53 command.extend(["--rev", rev]) | |
54 | |
55 # Ignore stderr output and return code here: if revision lookup failed we | |
56 # should simply return an empty string. | |
57 result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] | |
58 return result.strip() | |
59 | |
60 def pull(self, repo): | |
61 subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"]) | |
62 | |
63 def update(self, repo, rev): | |
64 subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev]) | |
65 | |
66 def ignore(self, target, repo): | |
67 | |
68 if not self.istype(target): | |
69 | |
70 config_path = os.path.join(repo, ".hg", "hgrc") | |
71 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies")) | |
72 | |
73 config = RawConfigParser() | |
74 config.read(config_path) | |
75 | |
76 if not config.has_section("ui"): | |
77 config.add_section("ui") | |
78 | |
79 config.set("ui", "ignore.dependencies", ignore_path) | |
80 with open(config_path, "w") as stream: | |
81 config.write(stream) | |
82 | |
83 module = os.path.relpath(target, repo) | |
84 _ensure_line_exists(ignore_path, module) | |
85 | |
86 def postprocess_url(self, url): | |
87 return url | |
88 | |
89 class Git(): | |
90 def istype(self, repodir): | |
91 return os.path.exists(os.path.join(repodir, ".git")) | |
92 | |
93 def clone(self, source, target): | |
94 source = source.rstrip("/") | |
95 if not source.endswith(".git"): | |
96 source += ".git" | |
97 subprocess.check_call(["git", "clone", "--quiet", source, target]) | |
98 | |
99 def get_revision_id(self, repo, rev="HEAD"): | |
100 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] | |
101 return subprocess.check_output(command, cwd=repo).strip() | |
102 | |
103 def pull(self, repo): | |
104 # Fetch tracked branches, new tags and the list of available remote branches | |
105 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo) | |
106 # Next we need to ensure all remote branches are tracked | |
107 newly_tracked = False | |
108 remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo) | |
109 for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M): | |
110 remote, local = match.groups() | |
111 with open(os.devnull, "wb") as devnull: | |
112 if subprocess.call(["git", "branch", "--track", local, remote], | |
113 cwd=repo, stdout=devnull, stderr=devnull) == 0: | |
114 newly_tracked = True | |
115 # Finally fetch any newly tracked remote branches | |
116 if newly_tracked: | |
117 subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo) | |
118 | |
119 def update(self, repo, rev): | |
120 subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo) | |
121 | |
122 def ignore(self, target, repo): | |
123 module = os.path.relpath(target, repo) | |
124 exclude_file = os.path.join(repo, ".git", "info", "exclude") | |
125 _ensure_line_exists(exclude_file, module) | |
126 | |
127 def postprocess_url(self, url): | |
128 # Handle alternative syntax of SSH URLS | |
129 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme: | |
130 return "ssh://" + url.replace(":", "/", 1) | |
131 return url | |
132 | |
133 repo_types = OrderedDict(( | |
134 ("hg", Mercurial()), | |
135 ("git", Git()), | |
136 )) | |
137 | |
138 def parse_spec(path, line): | 44 def parse_spec(path, line): |
139 if "=" not in line: | 45 if "=" not in line: |
140 logging.warning("Invalid line in file %s: %s" % (path, line)) | 46 logging.warning("Invalid line in file %s: %s" % (path, line)) |
141 return None, None | 47 return None, None |
142 | 48 |
143 key, value = line.split("=", 1) | 49 key, value = line.split("=", 1) |
144 key = key.strip() | 50 key = key.strip() |
145 items = value.split() | 51 items = value.split() |
146 if not len(items): | 52 if not len(items): |
147 logging.warning("No value specified for key %s in file %s" % (key, path)) | 53 logging.warning("No value specified for key %s in file %s" % (key, path)) |
(...skipping 133 matching lines...)
281 config["_root"] = overrideroots | 187 config["_root"] = overrideroots |
282 | 188 |
283 for dir, revisions in config.iteritems(): | 189 for dir, revisions in config.iteritems(): |
284 if dir.startswith("_") or revisions["_source"] in skipdependencies: | 190 if dir.startswith("_") or revisions["_source"] in skipdependencies: |
285 continue | 191 continue |
286 target = safe_join(repodir, dir) | 192 target = safe_join(repodir, dir) |
287 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) | 193 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) |
288 update_repo(target, revisions) | 194 update_repo(target, revisions) |
289 resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroots, skipdependencies=skipdependencies) | 195 resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroots, skipdependencies=skipdependencies) |
290 | 196 |
291 if self_update and "_self" in config and "*" in config["_self"]: | 197 if self_update and config.get("_self", {}).get("*", "").lower() not in ("", "0", "false"): |
Sebastian Noack 2015/05/19 11:52:41: The logic checking the "true"-ness of the value is …
Sebastian Noack 2015/05/19 13:25:36: Just realized that we have a ConfigParser here. So …
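The comment above is truncated, but it appears to suggest that the "true"-ness check need not be hand-rolled, since ConfigParser already ships boolean parsing. A minimal sketch of that built-in behaviour, assuming this is what is meant (the "main" section name is made up; the dependencies file itself is parsed by hand, so this is only an analogy):

    # Illustration of RawConfigParser's built-in boolean handling (hypothetical
    # section/option names; not part of the patch under review).
    from ConfigParser import RawConfigParser
    from StringIO import StringIO

    config = RawConfigParser()
    config.readfp(StringIO("[main]\n_self = true\n"))
    # getboolean() treats "1"/"yes"/"true"/"on" as True and
    # "0"/"no"/"false"/"off" as False, raising ValueError otherwise.
    print config.getboolean("main", "_self")  # True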
292 source = safe_join(repodir, config["_self"]["*"]) | 198 original_path = sys.path |
Sebastian Noack 2015/05/19 11:52:41: This is useless, as you merely backup the reference …
 | 199 if "_module_path" in config and "*" in config["_module_path"]: |
 | 200 sys.path.insert(0, safe_join(repodir, config["_module_path"]["*"])) |
 | 201 |
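For context on the `original_path = sys.path` comment above: the assignment only binds a second name to the same list object, so the in-place insert on new line 200 shows up in the "backup" as well; only a copy preserves the original contents. A small sketch of the difference (the inserted directory is made up):

    import sys

    backup_ref = sys.path                  # same list object, not a real backup
    sys.path.insert(0, "/extra/modules")   # hypothetical directory
    assert backup_ref[0] == "/extra/modules"
    sys.path.pop(0)

    backup_copy = list(sys.path)           # shallow copy keeps the old contents
    sys.path.insert(0, "/extra/modules")
    assert backup_copy[0] != "/extra/modules"
    sys.path = backup_copy                 # restores the pre-insert search path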
293 try: | 202 try: |
294 with io.open(source, "rb") as handle: | 203 from buildtools.script_compiler import compile_script |
295 sourcedata = handle.read() | 204 sourcedata = "".join(compile_script("buildtools.ensure_dependencies", [ |
296 except IOError, e: | 205 "buildtools", |
297 if e.errno != errno.ENOENT: | 206 "buildtools.script_compiler", |
298 raise | 207 "buildtools.vcs", |
299 logging.warning("File %s doesn't exist, skipping self-update" % source) | 208 ])) |
300 return | |
301 | 209 |
302 target = __file__ | 210 target = __file__ |
303 with io.open(target, "rb") as handle: | 211 try: |
304 targetdata = handle.read() | 212 with io.open(target, "rb") as handle: |
Sebastian Noack 2015/05/19 11:52:41: Nit: You can just use the open() built-in function …
Sebastian Noack 2015/05/19 11:52:41: Nit: "file" would be a more appropriate variable name.
 | 213 targetdata = handle.read() |
 | 214 except IOError, e: |
 | 215 if e.errno != errno.ENOENT: |
 | 216 raise |
 | 217 targetdata = None |
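The two nits above are truncated, but they seem to suggest the plain open() built-in (io.open adds nothing for a binary read in Python 2) and a more descriptive name than `handle`. A sketch of that hunk with both nits applied, as I read them (not the committed change):

    try:
        with open(target, "rb") as file:   # built-in open(); name per the nit
            targetdata = file.read()
    except IOError, e:
        if e.errno != errno.ENOENT:
            raise
        targetdata = None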
305 | 218 |
306 if sourcedata != targetdata: | 219 if sourcedata != targetdata: |
307 logging.info("Updating %s from %s, don't forget to commit" % (source, target)) | 220 logging.info("Updating %s, don't forget to commit" % target) |
308 with io.open(target, "wb") as handle: | 221 with io.open(target, "wb") as handle: |
309 handle.write(sourcedata) | 222 handle.write(sourcedata) |
310 if __name__ == "__main__": | 223 if __name__ == "__main__": |
311 logging.info("Restarting %s" % target) | 224 logging.info("Restarting %s" % target) |
312 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:]) | 225 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:]) |
313 else: | 226 else: |
314 logging.warning("Cannot restart %s automatically, please rerun" % target) | 227 logging.warning("Cannot restart %s automatically, please rerun" % target) |
315 | 228 except Exception, e: |
316 def _ensure_line_exists(path, pattern): | 229 logging.warning("Failed to update %s, skipping self-update" % __file__) |
Sebastian Noack 2015/05/19 11:52:41: Pro-Tip: logging.warning(.., exc_info=True)
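The pro-tip refers to the standard logging API: passing exc_info=True attaches the current exception's traceback to the log record, which would make the separate traceback import and the print_exc() call below unnecessary. A minimal sketch of the suggested form:

    import logging

    try:
        raise RuntimeError("stand-in for a failed self-update")
    except Exception:
        # exc_info=True appends the formatted traceback to the warning,
        # replacing the explicit traceback.print_exc() call.
        logging.warning("Failed to update %s, skipping self-update", __file__,
                        exc_info=True)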
317 with open(path, 'a+') as f: | 230 traceback.print_exc() |
318 file_content = [l.strip() for l in f.readlines()] | 231 finally: |
319 if not pattern in file_content: | 232 sys.path = original_path |
320 file_content.append(pattern) | |
321 f.seek(0, os.SEEK_SET) | |
322 f.truncate() | |
323 for l in file_content: | |
324 print >>f, l | |
325 | 233 |
326 if __name__ == "__main__": | 234 if __name__ == "__main__": |
327 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) | 235 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) |
328 | 236 |
329 parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.") | 237 parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.") |
330 parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path") | 238 parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path") |
 | 239 parser.add_argument("-s", "--self", metavar="path", type=str, help="Update ensure_dependencies.py at this location") |
Wladimir Palant 2015/05/18 15:27:00: This now allows two ways of setting up ensure_dependencies.py …
331 parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output") | 240 parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output") |
332 args = parser.parse_args() | 241 args = parser.parse_args() |
333 | 242 |
334 if args.quiet: | 243 if args.quiet: |
335 logging.disable(logging.INFO) | 244 logging.disable(logging.INFO) |
 | 245 if args.self: |
 | 246 __file__ = args.self |
336 | 247 |
337 repos = args.repos | 248 repos = args.repos |
338 if not len(repos): | 249 if not len(repos): |
339 repos = [os.path.dirname(__file__)] | 250 repos = [os.path.dirname(__file__)] |
340 for repo in repos: | 251 for repo in repos: |
341 resolve_deps(repo) | 252 resolve_deps(repo) |