Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29335462: Issue 3337 - Update adblockplus reference in libadblockplus to revision hg:813682c7161e (Closed)
Patch Set: Created Feb. 3, 2016, 1:26 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8 2 # coding: utf-8
3 3
4 # This Source Code Form is subject to the terms of the Mozilla Public 4 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 5 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 7
8 import sys 8 import sys
9 import os 9 import os
10 import posixpath 10 import posixpath
11 import re 11 import re
12 import io 12 import io
13 import errno 13 import errno
14 import logging 14 import logging
15 import subprocess 15 import subprocess
16 import urlparse 16 import urlparse
17 import argparse 17 import argparse
18 18
19 from collections import OrderedDict 19 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 20 from ConfigParser import RawConfigParser
21 21
# Shown to the user when no dependencies file is found; doubles as the
# format documentation for dependencies files.
USAGE = """
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
"""
36 42
# Any value other than "", "0" or "false" (case-insensitive) in the
# SKIP_DEPENDENCY_UPDATES environment variable disables cloning and
# updating of dependency repositories.
SKIP_DEPENDENCY_UPDATES = os.environ.get(
  "SKIP_DEPENDENCY_UPDATES", ""
).lower() not in ("", "0", "false")
40 46
41 class Mercurial(): 47 class Mercurial():
  def istype(self, repodir):
    """Return True if repodir is managed by Mercurial (has a .hg directory)."""
    return os.path.exists(os.path.join(repodir, ".hg"))
44 50
  def clone(self, source, target):
    """Clone source into target via `hg clone --noupdate`.

    The working directory is left empty; update() is expected to check
    out the desired revision afterwards.
    """
    # Normalize the source URL to end with a slash before cloning.
    if not source.endswith("/"):
      source += "/"
    subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])
49 55
  def get_revision_id(self, repo, rev=None):
    """Return the changeset ID for rev in repo, or "" if lookup fails.

    Without rev, returns the ID of the current working directory parent
    (`hg id --id`).
    """
    command = ["hg", "id", "--repository", repo, "--id"]
    if rev:
      command.extend(["--rev", rev])

    # Ignore stderr output and return code here: if revision lookup failed we
    # should simply return an empty string.
    result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
    return result.strip()
59 65
  def pull(self, repo):
    """Fetch new remote changesets into repo (no working copy update)."""
    subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"])
62 68
  def update(self, repo, rev, revname):
    """Check out revision rev in repo.

    revname is unused here; it is accepted for interface parity with
    Git.update(), which needs the unresolved revision name.
    """
    # --check refuses to update when there are uncommitted local changes.
    subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev])
65 71
66 def ignore(self, target, repo): 72 def ignore(self, target, repo):
67 73
68 if not self.istype(target): 74 if not self.istype(target):
69 75
70 config_path = os.path.join(repo, ".hg", "hgrc") 76 config_path = os.path.join(repo, ".hg", "hgrc")
71 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies")) 77 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies"))
72 78
73 config = RawConfigParser() 79 config = RawConfigParser()
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
109 for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M): 115 for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
110 remote, local = match.groups() 116 remote, local = match.groups()
111 with open(os.devnull, "wb") as devnull: 117 with open(os.devnull, "wb") as devnull:
112 if subprocess.call(["git", "branch", "--track", local, remote], 118 if subprocess.call(["git", "branch", "--track", local, remote],
113 cwd=repo, stdout=devnull, stderr=devnull) == 0: 119 cwd=repo, stdout=devnull, stderr=devnull) == 0:
114 newly_tracked = True 120 newly_tracked = True
115 # Finally fetch any newly tracked remote branches 121 # Finally fetch any newly tracked remote branches
116 if newly_tracked: 122 if newly_tracked:
117 subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo) 123 subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)
118 124
  def update(self, repo, rev, revname):
    """Check out revision revname in repo.

    rev (the resolved commit ID) is accepted for interface parity with
    Mercurial.update() but is not used here.
    """
    # NOTE(review): checks out revname rather than the resolved rev —
    # presumably so that a branch name leaves HEAD attached to the
    # branch instead of detached at a commit; confirm.
    subprocess.check_call(["git", "checkout", "--quiet", revname], cwd=repo)
121 127
  def ignore(self, target, repo):
    """Add the dependency checkout at target to repo's local exclude list.

    Appends the path of target relative to repo, with a leading path
    separator, to .git/info/exclude (which is not itself under version
    control).
    """
    # NOTE(review): os.path.sep is "\" on Windows, but git exclude
    # patterns use "/" — confirm this produces a working pattern there.
    module = os.path.sep + os.path.relpath(target, repo)
    exclude_file = os.path.join(repo, ".git", "info", "exclude")
    _ensure_line_exists(exclude_file, module)
126 132
  def postprocess_url(self, url):
    """Normalize scp-like SSH addresses to ssh:// URLs; pass others through."""
    # Handle alternative syntax of SSH URLs: no scheme but both "@" and
    # ":" present, e.g. git@github.com:user/repo.git.
    if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
      return "ssh://" + url.replace(":", "/", 1)
    return url
132 138
# Supported VCS backends, keyed by the type prefix used in dependencies
# files. An OrderedDict because iteration order determines lookup
# preference elsewhere in this script.
repo_types = OrderedDict((
  ("hg", Mercurial()),
  ("git", Git()),
))
137 143
# Matches one item of the form [vcs:]value, where vcs is one of the
# known repository types; group 1 is the optional vcs, group 2 the value.
item_regexp = re.compile(
  "^(?:(" + "|".join(map(re.escape, repo_types.keys())) + "):)?"
  "(.+)$"
)

# Matches a source of the form [url@]rev; group 1 is the optional URL,
# group 2 the revision.
source_regexp = re.compile(
  "^(?:(.*)@)?"
  "(.+)$"
)
155
def merge_seqs(seq1, seq2):
  """Return a list merging the supplied sequences element-wise.

  For each position the truthy value from seq2 wins, falling back to the
  value from seq1; the shorter sequence is treated as padded with None.
  Either argument may be None/empty, meaning an empty sequence.

  (None, 2), (1,) => [1, 2]
  None, (1, 2) => [1, 2]
  (1, 2), (3, 4) => [3, 4]
  """
  # The previous implementation relied on Python 2's map() padding the
  # shorter sequence with None (and on map() returning a list). Pad
  # explicitly so the documented examples hold on any Python version and
  # a real list is always returned.
  seq1 = tuple(seq1 or ())
  seq2 = tuple(seq2 or ())
  length = max(len(seq1), len(seq2))
  seq1 += (None,) * (length - len(seq1))
  seq2 += (None,) * (length - len(seq2))
  return [item2 or item1 for item1, item2 in zip(seq1, seq2)]
def parse_spec(path, line):
  """Parse one "key = value ..." line from a dependencies file.

  Returns (key, result) where result is an OrderedDict mapping a VCS
  type ("hg", "git", or "*" when unspecified) to either a (url, rev)
  pair (for dependency entries) or a plain string (for "_"-prefixed
  metadata entries such as _root and _self). Returns (None, None) for
  lines without "=" and (key, None) when no value is given; both cases
  log a warning instead of raising.
  """
  if "=" not in line:
    logging.warning("Invalid line in file %s: %s" % (path, line))
    return None, None

  key, value = line.split("=", 1)
  key = key.strip()
  items = value.split()
  if not len(items):
    logging.warning("No value specified for key %s in file %s" % (key, path))
    return key, None

  result = OrderedDict()
  # Keys starting with "_" (e.g. _root, _self) carry metadata, not dependencies.
  is_dependency_field = not key.startswith("_")

  for i, item in enumerate(items):
    try:
      vcs, value = re.search(item_regexp, item).groups()
      vcs = vcs or "*"
      if is_dependency_field:
        if i == 0 and vcs == "*":
          # In order to be backwards compatible we have to assume that the first
          # source contains only a URL/path for the repo if it does not contain
          # the VCS part
          url_rev = (value, None)
        else:
          url_rev = re.search(source_regexp, value).groups()
        result[vcs] = merge_seqs(result.get(vcs), url_rev)
      else:
        if vcs in result:
          # Bug fix: the warning claims the duplicate is ignored, but the
          # value used to be overwritten anyway. Keep the first value, as
          # the message says.
          logging.warning("Ignoring duplicate value for type %r "
                          "(key %r in file %r)" % (vcs, key, path))
        else:
          result[vcs] = value
    except AttributeError:
      # re.search() returned None; defensive only — both patterns match
      # any non-empty string, and items from str.split() are non-empty.
      logging.warning("Ignoring invalid item %r for type %r "
                      "(key %r in file %r)" % (item, vcs, key, path))
      continue
  return key, result
164 203
165 def read_deps(repodir): 204 def read_deps(repodir):
166 result = {} 205 result = {}
167 deps_path = os.path.join(repodir, "dependencies") 206 deps_path = os.path.join(repodir, "dependencies")
168 try: 207 try:
169 with io.open(deps_path, "rt", encoding="utf-8") as handle: 208 with io.open(deps_path, "rt", encoding="utf-8") as handle:
170 for line in handle: 209 for line in handle:
171 # Remove comments and whitespace 210 # Remove comments and whitespace
172 line = re.sub(r"#.*", "", line).strip() 211 line = re.sub(r"#.*", "", line).strip()
(...skipping 21 matching lines...) Expand all
194 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posi xpath.sep): 233 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posi xpath.sep):
195 raise Exception("Dependency path %s has to be inside the repository" % subpa th) 234 raise Exception("Dependency path %s has to be inside the repository" % subpa th)
196 return os.path.join(path, *normpath.split(posixpath.sep)) 235 return os.path.join(path, *normpath.split(posixpath.sep))
197 236
def get_repo_type(repo):
  """Return the repo_types key whose backend recognizes repo, else None."""
  recognized = (name for name, repotype in repo_types.iteritems()
                if repotype.istype(repo))
  return next(recognized, None)
203 242
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
  """Clone a dependency repository into target if it does not exist yet.

  parentrepo/parenttype identify the enclosing repository (so the new
  checkout can be added to its ignore list), type names the VCS backend
  to clone with, and root + sourcename determine the clone URL. Does
  nothing when target already exists, or (with a warning) when
  SKIP_DEPENDENCY_UPDATES is set.
  """
  if os.path.exists(target):
    return

  if SKIP_DEPENDENCY_UPDATES:
    logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
                    "%s not cloned", target)
    return

  postprocess_url = repo_types[type].postprocess_url
  root = postprocess_url(root)
  sourcename = postprocess_url(sourcename)

  # A root that exists on the local filesystem is joined as a path;
  # anything else is treated as a URL.
  if os.path.exists(root):
    url = os.path.join(root, sourcename)
  else:
    url = urlparse.urljoin(root, sourcename)

  logging.info("Cloning repository %s into %s" % (url, target))
  repo_types[type].clone(url, target)
  repo_types[parenttype].ignore(target, parentrepo)
232 264
def update_repo(target, type, revision):
  """Check out revision in the repository at target.

  If the revision cannot be resolved locally, pulls remote changes first
  and retries; raises if it still cannot be resolved. Honours the
  SKIP_DEPENDENCY_UPDATES environment variable.
  """
  backend = repo_types[type]
  resolved_revision = backend.get_revision_id(target, revision)
  current_revision = backend.get_revision_id(target)

  # Nothing to do if the working copy is already at the wanted revision.
  if resolved_revision == current_revision:
    return

  if SKIP_DEPENDENCY_UPDATES:
    logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
                    "%s not checked out to %s", target, revision)
    return

  if not resolved_revision:
    logging.info("Revision %s is unknown, downloading remote changes" % revision)
    backend.pull(target)
    resolved_revision = backend.get_revision_id(target, revision)
    if not resolved_revision:
      raise Exception("Failed to resolve revision %s" % revision)

  logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
  backend.update(target, resolved_revision, revision)
269 284
270 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdep endencies=set()): 285 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdep endencies=set()):
271 config = read_deps(repodir) 286 config = read_deps(repodir)
272 if config is None: 287 if config is None:
273 if level == 0: 288 if level == 0:
274 logging.warning("No dependencies file in directory %s, nothing to do...\n% s" % (repodir, USAGE)) 289 logging.warning("No dependencies file in directory %s, nothing to do...\n% s" % (repodir, USAGE))
275 return 290 return
276 if level >= 10: 291 if level >= 10:
277 logging.warning("Too much subrepository nesting, ignoring %s" % repo) 292 logging.warning("Too much subrepository nesting, ignoring %s" % repo)
278 return 293 return
279 294
280 if overrideroots is not None: 295 if overrideroots is not None:
281 config["_root"] = overrideroots 296 config["_root"] = overrideroots
282 297
283 for dir, revisions in config.iteritems(): 298 for dir, sources in config.iteritems():
284 if dir.startswith("_") or revisions["_source"] in skipdependencies: 299 if (dir.startswith("_") or
300 skipdependencies.intersection([s[0] for s in sources if s[0]])):
285 continue 301 continue
302
286 target = safe_join(repodir, dir) 303 target = safe_join(repodir, dir)
287 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) 304 parenttype = get_repo_type(repodir)
288 update_repo(target, revisions) 305 _root = config.get("_root", {})
289 resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroo ts, skipdependencies=skipdependencies) 306
307 for key in sources.keys() + _root.keys():
308 if key == parenttype or key is None and vcs != "*":
309 vcs = key
310 source, rev = merge_seqs(sources.get("*"), sources.get(vcs))
311
312 if not (vcs and source and rev):
313 logging.warning("No valid source / revision found to create %s" % target)
314 continue
315
316 ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
317 update_repo(target, vcs, rev)
318 resolve_deps(target, level + 1, self_update=False,
319 overrideroots=overrideroots, skipdependencies=skipdependencies)
290 320
291 if self_update and "_self" in config and "*" in config["_self"]: 321 if self_update and "_self" in config and "*" in config["_self"]:
292 source = safe_join(repodir, config["_self"]["*"]) 322 source = safe_join(repodir, config["_self"]["*"])
293 try: 323 try:
294 with io.open(source, "rb") as handle: 324 with io.open(source, "rb") as handle:
295 sourcedata = handle.read() 325 sourcedata = handle.read()
296 except IOError, e: 326 except IOError, e:
297 if e.errno != errno.ENOENT: 327 if e.errno != errno.ENOENT:
298 raise 328 raise
299 logging.warning("File %s doesn't exist, skipping self-update" % source) 329 logging.warning("File %s doesn't exist, skipping self-update" % source)
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
332 args = parser.parse_args() 362 args = parser.parse_args()
333 363
334 if args.quiet: 364 if args.quiet:
335 logging.disable(logging.INFO) 365 logging.disable(logging.INFO)
336 366
337 repos = args.repos 367 repos = args.repos
338 if not len(repos): 368 if not len(repos):
339 repos = [os.path.dirname(__file__)] 369 repos = [os.path.dirname(__file__)]
340 for repo in repos: 370 for repo in repos:
341 resolve_deps(repo) 371 resolve_deps(repo)
OLDNEW

Powered by Google App Engine
This is Rietveld