Rietveld Code Review Tool

Delta Between Two Patch Sets: ensure_dependencies.py

Issue 29321227: Issue 2735 - Use ensure_dependencies.py in adblockplusie (Closed)
Left Patch Set: Created June 30, 2015, 12:43 a.m.
Right Patch Set: Update libadblockplus version. Use the same version of ensure_dependencies.py as in buildtools. Created July 2, 2015, 10:22 a.m.
LEFT | RIGHT
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8 2 # coding: utf-8
3 3
4 # This Source Code Form is subject to the terms of the Mozilla Public 4 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 5 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 7
8 import sys 8 import sys
9 import os 9 import os
10 import posixpath 10 import posixpath
(...skipping 16 matching lines...)
27 # File to update this script from (optional) 27 # File to update this script from (optional)
28 _self = buildtools/ensure_dependencies.py 28 _self = buildtools/ensure_dependencies.py
29 # Check out elemhidehelper repository into extensions/elemhidehelper directory 29 # Check out elemhidehelper repository into extensions/elemhidehelper directory
30 # at tag "1.2". 30 # at tag "1.2".
31 extensions/elemhidehelper = elemhidehelper 1.2 31 extensions/elemhidehelper = elemhidehelper 1.2
32 # Check out buildtools repository into buildtools directory at VCS-specific 32 # Check out buildtools repository into buildtools directory at VCS-specific
33 # revision IDs. 33 # revision IDs.
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
35 """ 35 """
36 36
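As a rough sketch only (parse_spec and read_deps sit in the skipped lines above, so the exact structure is an assumption inferred from how ensure_repo and update_repo below consume it), the documented spec lines would end up roughly as:

    # Hypothetical in-memory form of the example lines above:
    # "buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5"
    buildtools_spec = {"_source": "buildtools", "hg": "016d16f7137b", "git": "f3f8692f82e5"}
    # "extensions/elemhidehelper = elemhidehelper 1.2" pins one revision for any VCS
    elemhidehelper_spec = {"_source": "elemhidehelper", "*": "1.2"}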
37 SKIP_DEPENDENCY_UPDATES = os.environ.get(
Felix Dahlke 2015/07/02 10:02:06 This is a newer version of the script than what's
38 "SKIP_DEPENDENCY_UPDATES", ""
39 ).lower() not in ("", "0", "false")
40
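A minimal sketch of how the SKIP_DEPENDENCY_UPDATES switch in the left-hand version is interpreted; any value other than an empty string, "0" or "false" (case-insensitive) enables the skip:

    import os

    # Hypothetical value; "1", "true" or "yes" would all enable the skip.
    os.environ["SKIP_DEPENDENCY_UPDATES"] = "1"
    skip = os.environ.get("SKIP_DEPENDENCY_UPDATES", "").lower() not in ("", "0", "false")
    # skip is now True, so the left-hand ensure_repo()/update_repo() only warn
    # instead of cloning or checking out dependencies.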
41 class Mercurial(): 37 class Mercurial():
42 def istype(self, repodir): 38 def istype(self, repodir):
43 return os.path.exists(os.path.join(repodir, ".hg")) 39 return os.path.exists(os.path.join(repodir, ".hg"))
44 40
45 def clone(self, source, target): 41 def clone(self, source, target):
46 if not source.endswith("/"): 42 if not source.endswith("/"):
47 source += "/" 43 source += "/"
48 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target]) 44 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])
49 45
50 def get_revision_id(self, repo, rev=None): 46 def get_revision_id(self, repo, rev=None):
(...skipping 24 matching lines...)
75 71
76 if not config.has_section("ui"): 72 if not config.has_section("ui"):
77 config.add_section("ui") 73 config.add_section("ui")
78 74
79 config.set("ui", "ignore.dependencies", ignore_path) 75 config.set("ui", "ignore.dependencies", ignore_path)
80 with open(config_path, "w") as stream: 76 with open(config_path, "w") as stream:
81 config.write(stream) 77 config.write(stream)
82 78
83 module = os.path.relpath(target, repo) 79 module = os.path.relpath(target, repo)
84 _ensure_line_exists(ignore_path, module) 80 _ensure_line_exists(ignore_path, module)
85
86 def postprocess_url(self, url):
87 return url
88 81
89 class Git(): 82 class Git():
90 def istype(self, repodir): 83 def istype(self, repodir):
91 return os.path.exists(os.path.join(repodir, ".git")) 84 return os.path.exists(os.path.join(repodir, ".git"))
92 85
93 def clone(self, source, target): 86 def clone(self, source, target):
94 source = source.rstrip("/") 87 source = source.rstrip("/")
95 if not source.endswith(".git"): 88 if not source.endswith(".git"):
96 source += ".git" 89 source += ".git"
97 subprocess.check_call(["git", "clone", "--quiet", source, target]) 90 subprocess.check_call(["git", "clone", "--quiet", source, target])
98 91
99 def get_revision_id(self, repo, rev="HEAD"): 92 def get_revision_id(self, repo, rev="HEAD"):
100 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] 93 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}']
101 return subprocess.check_output(command, cwd=repo).strip() 94 return subprocess.check_output(command, cwd=repo).strip()
102 95
103 def pull(self, repo): 96 def pull(self, repo):
104 # Fetch tracked branches, new tags and the list of available remote branches
105 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo) 97 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
106 # Next we need to ensure all remote branches are tracked
107 newly_tracked = False
108 remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
109 for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
110 remote, local = match.groups()
111 with open(os.devnull, "wb") as devnull:
112 if subprocess.call(["git", "branch", "--track", local, remote],
113 cwd=repo, stdout=devnull, stderr=devnull) == 0:
114 newly_tracked = True
115 # Finally fetch any newly tracked remote branches
116 if newly_tracked:
117 subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)
118 98
119 def update(self, repo, rev): 99 def update(self, repo, rev):
120 subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo) 100 subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo)
121 101
122 def ignore(self, target, repo): 102 def ignore(self, target, repo):
123 module = os.path.relpath(target, repo) 103 module = os.path.relpath(target, repo)
124 exclude_file = os.path.join(repo, ".git", "info", "exclude") 104 exclude_file = os.path.join(repo, ".git", "info", "exclude")
125 _ensure_line_exists(exclude_file, module) 105 _ensure_line_exists(exclude_file, module)
126
127 def postprocess_url(self, url):
128 # Handle alternative syntax of SSH URLS
129 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
130 return "ssh://" + url.replace(":", "/", 1)
131 return url
132 106
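A small sketch of what the left-hand Git.postprocess_url() does with the SCP-like SSH syntax (hypothetical URL; urlparse is imported in the skipped header lines):

    import urlparse  # Python 2 module, as used elsewhere in this script

    url = "git@example.com:org/repo.git"  # hypothetical SCP-like remote
    if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
        url = "ssh://" + url.replace(":", "/", 1)
    # url is now "ssh://git@example.com/org/repo.git"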
133 repo_types = OrderedDict(( 107 repo_types = OrderedDict((
134 ("hg", Mercurial()), 108 ("hg", Mercurial()),
135 ("git", Git()), 109 ("git", Git()),
136 )) 110 ))
137 111
138 def parse_spec(path, line): 112 def parse_spec(path, line):
139 if "=" not in line: 113 if "=" not in line:
140 logging.warning("Invalid line in file %s: %s" % (path, line)) 114 logging.warning("Invalid line in file %s: %s" % (path, line))
141 return None, None 115 return None, None
(...skipping 35 matching lines...)
177 if spec: 151 if spec:
178 result[key] = spec 152 result[key] = spec
179 return result 153 return result
180 except IOError, e: 154 except IOError, e:
181 if e.errno != errno.ENOENT: 155 if e.errno != errno.ENOENT:
182 raise 156 raise
183 return None 157 return None
184 158
185 def safe_join(path, subpath): 159 def safe_join(path, subpath):
186 # This has been inspired by Flask's safe_join() function 160 # This has been inspired by Flask's safe_join() function
187 forbidden = {os.sep, os.altsep} - {posixpath.sep, None} 161 forbidden = set([os.sep, os.altsep]) - set([posixpath.sep, None])
188 if any(sep in subpath for sep in forbidden): 162 if any(sep in subpath for sep in forbidden):
189 raise Exception("Illegal directory separator in dependency path %s" % subpath) 163 raise Exception("Illegal directory separator in dependency path %s" % subpath)
190 164
191 normpath = posixpath.normpath(subpath) 165 normpath = posixpath.normpath(subpath)
192 if posixpath.isabs(normpath): 166 if posixpath.isabs(normpath):
193 raise Exception("Dependency path %s cannot be absolute" % subpath) 167 raise Exception("Dependency path %s cannot be absolute" % subpath)
194 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): 168 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
195 raise Exception("Dependency path %s has to be inside the repository" % subpath) 169 raise Exception("Dependency path %s has to be inside the repository" % subpath)
196 return os.path.join(path, *normpath.split(posixpath.sep)) 170 return os.path.join(path, *normpath.split(posixpath.sep))
197 171
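For illustration, a few hypothetical safe_join() calls showing the checks above: dependency paths are treated as POSIX-style relative paths and must stay inside the repository:

    # Assuming the safe_join() defined above:
    safe_join("adblockplusie", "libadblockplus")    # OK, joined with os.path.join
    safe_join("adblockplusie", "src/engine")        # OK, "/" translated to os.sep
    # Each of these raises an Exception instead:
    #   safe_join("adblockplusie", "/etc/passwd")   # absolute path
    #   safe_join("adblockplusie", "../elsewhere")  # escapes the repository
    #   safe_join("adblockplusie", "a\\b")          # uses os.sep/os.altsep on Windows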
198 def get_repo_type(repo): 172 def get_repo_type(repo):
199 for name, repotype in repo_types.iteritems(): 173 for name, repotype in repo_types.iteritems():
200 if repotype.istype(repo): 174 if repotype.istype(repo):
201 return name 175 return name
202 return None 176 return None
203 177
204 def ensure_repo(parentrepo, target, roots, sourcename): 178 def ensure_repo(parentrepo, target, roots, sourcename):
205 if os.path.exists(target): 179 if os.path.exists(target):
206 return
207
208 if SKIP_DEPENDENCY_UPDATES:
209 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
210 "%s not cloned", target)
211 return 180 return
212 181
213 parenttype = get_repo_type(parentrepo) 182 parenttype = get_repo_type(parentrepo)
214 type = None 183 type = None
215 for key in roots: 184 for key in roots:
216 if key == parenttype or (key in repo_types and type is None): 185 if key == parenttype or (key in repo_types and type is None):
217 type = key 186 type = key
218 if type is None: 187 if type is None:
219 raise Exception("No valid source found to create %s" % target) 188 raise Exception("No valid source found to create %s" % target)
220 189
221 postprocess_url = repo_types[type].postprocess_url 190 if os.path.exists(roots[type]):
222 root = postprocess_url(roots[type]) 191 url = os.path.join(roots[type], sourcename)
223 sourcename = postprocess_url(sourcename)
224
225 if os.path.exists(root):
226 url = os.path.join(root, sourcename)
227 else: 192 else:
228 url = urlparse.urljoin(root, sourcename) 193 url = urlparse.urljoin(roots[type], sourcename)
229 194
230 logging.info("Cloning repository %s into %s" % (url, target)) 195 logging.info("Cloning repository %s into %s" % (url, target))
231 repo_types[type].clone(url, target) 196 repo_types[type].clone(url, target)
232 197
233 for repo in repo_types.itervalues(): 198 for repo in repo_types.itervalues():
234 if repo.istype(parentrepo): 199 if repo.istype(parentrepo):
235 repo.ignore(target, parentrepo) 200 repo.ignore(target, parentrepo)
236 201
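A brief sketch of how ensure_repo() resolves the clone URL: if the configured root is a local directory the path is joined directly, otherwise it is treated as a base URL (the root values here are hypothetical; the real ones come from the dependencies file's _root section):

    import os
    import urlparse  # Python 2

    roots = {"hg": "https://hg.adblockplus.org/", "git": "https://github.com/adblockplus/"}
    sourcename = "buildtools"

    root = roots["hg"]
    if os.path.exists(root):                   # local mirror checked out on disk
        url = os.path.join(root, sourcename)
    else:                                      # remote base URL
        url = urlparse.urljoin(root, sourcename)
    # url == "https://hg.adblockplus.org/buildtools" in the remote case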
237 def update_repo(target, revisions): 202 def update_repo(target, revisions):
238 type = get_repo_type(target) 203 type = get_repo_type(target)
239 if type is None: 204 if type is None:
240 logging.warning("Type of repository %s unknown, skipping update" % target) 205 logging.warning("Type of repository %s unknown, skipping update" % target)
241 return 206 return
242 207
243 if type in revisions: 208 if type in revisions:
244 revision = revisions[type] 209 revision = revisions[type]
245 elif "*" in revisions: 210 elif "*" in revisions:
246 revision = revisions["*"] 211 revision = revisions["*"]
247 else: 212 else:
248 logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type)) 213 logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type))
249 return 214 return
250 215
251 resolved_revision = repo_types[type].get_revision_id(target, revision) 216 resolved_revision = repo_types[type].get_revision_id(target, revision)
217 if not resolved_revision:
218 logging.info("Revision %s is unknown, downloading remote changes" % revision)
219 repo_types[type].pull(target)
220 resolved_revision = repo_types[type].get_revision_id(target, revision)
221 if not resolved_revision:
222 raise Exception("Failed to resolve revision %s" % revision)
223
252 current_revision = repo_types[type].get_revision_id(target) 224 current_revision = repo_types[type].get_revision_id(target)
253
254 if resolved_revision != current_revision: 225 if resolved_revision != current_revision:
255 if SKIP_DEPENDENCY_UPDATES:
256 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
257 "%s not checked out to %s", target, revision)
258 return
259
260 if not resolved_revision:
261 logging.info("Revision %s is unknown, downloading remote changes" % revision)
262 repo_types[type].pull(target)
263 resolved_revision = repo_types[type].get_revision_id(target, revision)
264 if not resolved_revision:
265 raise Exception("Failed to resolve revision %s" % revision)
266
267 logging.info("Updating repository %s to revision %s" % (target, resolved_revision)) 226 logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
268 repo_types[type].update(target, resolved_revision) 227 repo_types[type].update(target, resolved_revision)
269 228
270 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): 229 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
271 config = read_deps(repodir) 230 config = read_deps(repodir)
272 if config is None: 231 if config is None:
273 if level == 0: 232 if level == 0:
274 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE)) 233 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
275 return 234 return
276 if level >= 10: 235 if level >= 10:
277 logging.warning("Too much subrepository nesting, ignoring %s" % repo) 236 logging.warning("Too much subrepository nesting, ignoring %s" % repo)
278 return
279 237
280 if overrideroots is not None: 238 if overrideroots is not None:
281 config["_root"] = overrideroots 239 config["_root"] = overrideroots
282 240
283 for dir, revisions in config.iteritems(): 241 for dir, revisions in config.iteritems():
284 if dir.startswith("_") or revisions["_source"] in skipdependencies: 242 if dir.startswith("_") or revisions["_source"] in skipdependencies:
285 continue 243 continue
286 target = safe_join(repodir, dir) 244 target = safe_join(repodir, dir)
287 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) 245 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"])
288 update_repo(target, revisions) 246 update_repo(target, revisions)
(...skipping 43 matching lines...)
332 args = parser.parse_args() 290 args = parser.parse_args()
333 291
334 if args.quiet: 292 if args.quiet:
335 logging.disable(logging.INFO) 293 logging.disable(logging.INFO)
336 294
337 repos = args.repos 295 repos = args.repos
338 if not len(repos): 296 if not len(repos):
339 repos = [os.path.dirname(__file__)] 297 repos = [os.path.dirname(__file__)]
340 for repo in repos: 298 for repo in repos:
341 resolve_deps(repo) 299 resolve_deps(repo)
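Finally, a minimal usage sketch (the argument parser itself is defined in the skipped lines above, so the exact option spellings are not shown here):

    # Hypothetical invocations:
    #   python ensure_dependencies.py                   # default: the script's own directory
    #   python ensure_dependencies.py path/to/checkout  # resolve dependencies for one repository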