Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29317074: Issue 2693 - Update adblockplus dependency (Closed)
Patch Set: rebase Created July 2, 2015, 1:37 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« no previous file with comments | « dependencies ('k') | include/AdblockPlus/FilterEngine.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8 2 # coding: utf-8
3 3
4 # This Source Code Form is subject to the terms of the Mozilla Public 4 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 5 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 7
8 import sys 8 import sys
9 import os 9 import os
10 import posixpath 10 import posixpath
(...skipping 16 matching lines...) Expand all
27 # File to update this script from (optional) 27 # File to update this script from (optional)
28 _self = buildtools/ensure_dependencies.py 28 _self = buildtools/ensure_dependencies.py
29 # Check out elemhidehelper repository into extensions/elemhidehelper directory 29 # Check out elemhidehelper repository into extensions/elemhidehelper directory
30 # at tag "1.2". 30 # at tag "1.2".
31 extensions/elemhidehelper = elemhidehelper 1.2 31 extensions/elemhidehelper = elemhidehelper 1.2
32 # Check out buildtools repository into buildtools directory at VCS-specific 32 # Check out buildtools repository into buildtools directory at VCS-specific
33 # revision IDs. 33 # revision IDs.
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
35 """ 35 """
36 36
# Set the SKIP_DEPENDENCY_UPDATES environment variable to any value other
# than "", "0" or "false" (case-insensitive) to make this script leave
# existing dependency checkouts untouched.
SKIP_DEPENDENCY_UPDATES = (
  os.environ.get("SKIP_DEPENDENCY_UPDATES", "").lower()
  not in ("", "0", "false")
)
40
37 class Mercurial(): 41 class Mercurial():
def istype(self, repodir):
  """Check whether repodir looks like a Mercurial working copy."""
  # A checkout is identified by the presence of its .hg metadata directory.
  hg_metadata = os.path.join(repodir, ".hg")
  return os.path.exists(hg_metadata)
40 44
def clone(self, source, target):
  """Clone source into target without updating the working copy."""
  # hg wants a trailing slash on the source URL so relative paths in the
  # stored default path stay consistent.
  source = source if source.endswith("/") else source + "/"
  subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])
45 49
46 def get_revision_id(self, repo, rev=None): 50 def get_revision_id(self, repo, rev=None):
(...skipping 25 matching lines...) Expand all
72 if not config.has_section("ui"): 76 if not config.has_section("ui"):
73 config.add_section("ui") 77 config.add_section("ui")
74 78
75 config.set("ui", "ignore.dependencies", ignore_path) 79 config.set("ui", "ignore.dependencies", ignore_path)
76 with open(config_path, "w") as stream: 80 with open(config_path, "w") as stream:
77 config.write(stream) 81 config.write(stream)
78 82
79 module = os.path.relpath(target, repo) 83 module = os.path.relpath(target, repo)
80 _ensure_line_exists(ignore_path, module) 84 _ensure_line_exists(ignore_path, module)
81 85
def postprocess_url(self, url):
  """Return url unchanged: Mercurial accepts its URLs as-is."""
  return url
88
class Git():
  """Git backend implementing the repository operations used by this script."""

  def istype(self, repodir):
    """Check whether repodir looks like a git checkout."""
    # Identified by the .git entry (a directory, or a file for submodules).
    return os.path.exists(os.path.join(repodir, ".git"))

  def clone(self, source, target):
    """Clone source into target."""
    # Normalize the source URL to the canonical "...name.git" form.
    source = source.rstrip("/")
    if not source.endswith(".git"):
      source += ".git"
    subprocess.check_call(["git", "clone", "--quiet", source, target])

  def get_revision_id(self, repo, rev="HEAD"):
    """Resolve rev to a commit ID, or return an empty string if unknown."""
    # ^{commit} dereferences tags so the result is always a commit object.
    command = ["git", "rev-parse", "--revs-only", rev + "^{commit}"]
    return subprocess.check_output(command, cwd=repo).strip()

  def pull(self, repo):
    # Fetch tracked branches, new tags and the list of available remote branches
    subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
    # Next we need to ensure all remote branches are tracked
    newly_tracked = False
    remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
    for branch_match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
      remote_name, local_name = branch_match.groups()
      with open(os.devnull, "wb") as devnull:
        # `git branch --track` fails harmlessly if the branch already exists;
        # suppress its output and only note genuinely new branches.
        result = subprocess.call(["git", "branch", "--track", local_name, remote_name],
                                 cwd=repo, stdout=devnull, stderr=devnull)
      if result == 0:
        newly_tracked = True
    # Finally fetch any newly tracked remote branches
    if newly_tracked:
      subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)

  def update(self, repo, rev):
    """Check the working copy out at rev."""
    subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo)

  def ignore(self, target, repo):
    """Add target to repo's local (non-committed) exclude list."""
    module = os.path.relpath(target, repo)
    exclude_file = os.path.join(repo, ".git", "info", "exclude")
    _ensure_line_exists(exclude_file, module)

  def postprocess_url(self, url):
    # Handle alternative syntax of SSH URLS
    if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
      return "ssh://" + url.replace(":", "/", 1)
    return url
132
# Known repository backends; iteration order determines which type is
# preferred when several would apply.
repo_types = OrderedDict([
  ("hg", Mercurial()),
  ("git", Git()),
])
111 137
112 def parse_spec(path, line): 138 def parse_spec(path, line):
113 if "=" not in line: 139 if "=" not in line:
114 logging.warning("Invalid line in file %s: %s" % (path, line)) 140 logging.warning("Invalid line in file %s: %s" % (path, line))
115 return None, None 141 return None, None
116 142
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
151 if spec: 177 if spec:
152 result[key] = spec 178 result[key] = spec
153 return result 179 return result
154 except IOError, e: 180 except IOError, e:
155 if e.errno != errno.ENOENT: 181 if e.errno != errno.ENOENT:
156 raise 182 raise
157 return None 183 return None
158 184
def safe_join(path, subpath):
  """Join subpath onto path, rejecting paths that escape the repository.

  This has been inspired by Flask's safe_join() function.
  """
  # Platform-specific separators other than "/" must not appear in specs.
  forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
  for sep in forbidden:
    if sep in subpath:
      raise Exception("Illegal directory separator in dependency path %s" % subpath)

  normpath = posixpath.normpath(subpath)
  if posixpath.isabs(normpath):
    raise Exception("Dependency path %s cannot be absolute" % subpath)
  if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
    raise Exception("Dependency path %s has to be inside the repository" % subpath)
  components = normpath.split(posixpath.sep)
  return os.path.join(path, *components)
171 197
def get_repo_type(repo):
  """Return the repo_types key matching repo's VCS, or None if unknown."""
  matches = (name for name, backend in repo_types.iteritems()
             if backend.istype(repo))
  return next(matches, None)
177 203
def ensure_repo(parentrepo, target, roots, sourcename):
  """Clone the dependency into target unless it already exists.

  roots maps repository type names to base URLs or local paths; sourcename
  is the repository name appended to the chosen root. After cloning, the
  new checkout is added to the parent repository's ignore list.
  """
  if os.path.exists(target):
    return

  if SKIP_DEPENDENCY_UPDATES:
    logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
                    "%s not cloned", target)
    return

  parenttype = get_repo_type(parentrepo)
  # Prefer a root matching the parent repository's type; otherwise fall
  # back to the first known type. (Named `repotype` rather than `type` to
  # avoid shadowing the builtin.)
  repotype = None
  for key in roots:
    if key == parenttype or (key in repo_types and repotype is None):
      repotype = key
  if repotype is None:
    raise Exception("No valid source found to create %s" % target)

  postprocess_url = repo_types[repotype].postprocess_url
  root = postprocess_url(roots[repotype])
  sourcename = postprocess_url(sourcename)

  if os.path.exists(root):
    # Local root: plain filesystem path join.
    url = os.path.join(root, sourcename)
  else:
    # Remote root: URL join.
    url = urlparse.urljoin(root, sourcename)

  logging.info("Cloning repository %s into %s" % (url, target))
  repo_types[repotype].clone(url, target)

  # Register the new checkout in whichever VCS the parent repository uses.
  for repo in repo_types.itervalues():
    if repo.istype(parentrepo):
      repo.ignore(target, parentrepo)
201 236
def update_repo(target, revisions):
  """Check out the revision configured for target's repository type.

  revisions maps type names (or the wildcard "*") to revision specifiers.
  If the revision cannot be resolved locally, remote changes are pulled
  before trying again; failure to resolve after pulling raises.
  """
  # Renamed from `type` to avoid shadowing the builtin.
  repotype = get_repo_type(target)
  if repotype is None:
    logging.warning("Type of repository %s unknown, skipping update" % target)
    return

  if repotype in revisions:
    revision = revisions[repotype]
  elif "*" in revisions:
    revision = revisions["*"]
  else:
    logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, repotype))
    return

  repo = repo_types[repotype]  # hoist the repeated lookup
  resolved_revision = repo.get_revision_id(target, revision)
  current_revision = repo.get_revision_id(target)

  if resolved_revision != current_revision:
    if SKIP_DEPENDENCY_UPDATES:
      logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
                      "%s not checked out to %s", target, revision)
      return

    if not resolved_revision:
      logging.info("Revision %s is unknown, downloading remote changes" % revision)
      repo.pull(target)
      resolved_revision = repo.get_revision_id(target, revision)
      if not resolved_revision:
        raise Exception("Failed to resolve revision %s" % revision)

    logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
    repo.update(target, resolved_revision)
228 269
229 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdep endencies=set()): 270 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdep endencies=set()):
230 config = read_deps(repodir) 271 config = read_deps(repodir)
231 if config is None: 272 if config is None:
232 if level == 0: 273 if level == 0:
233 logging.warning("No dependencies file in directory %s, nothing to do...\n% s" % (repodir, USAGE)) 274 logging.warning("No dependencies file in directory %s, nothing to do...\n% s" % (repodir, USAGE))
234 return 275 return
235 if level >= 10: 276 if level >= 10:
236 logging.warning("Too much subrepository nesting, ignoring %s" % repo) 277 logging.warning("Too much subrepository nesting, ignoring %s" % repo)
278 return
237 279
238 if overrideroots is not None: 280 if overrideroots is not None:
239 config["_root"] = overrideroots 281 config["_root"] = overrideroots
240 282
241 for dir, revisions in config.iteritems(): 283 for dir, revisions in config.iteritems():
242 if dir.startswith("_") or revisions["_source"] in skipdependencies: 284 if dir.startswith("_") or revisions["_source"] in skipdependencies:
243 continue 285 continue
244 target = safe_join(repodir, dir) 286 target = safe_join(repodir, dir)
245 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) 287 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"])
246 update_repo(target, revisions) 288 update_repo(target, revisions)
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
290 args = parser.parse_args() 332 args = parser.parse_args()
291 333
292 if args.quiet: 334 if args.quiet:
293 logging.disable(logging.INFO) 335 logging.disable(logging.INFO)
294 336
295 repos = args.repos 337 repos = args.repos
296 if not len(repos): 338 if not len(repos):
297 repos = [os.path.dirname(__file__)] 339 repos = [os.path.dirname(__file__)]
298 for repo in repos: 340 for repo in repos:
299 resolve_deps(repo) 341 resolve_deps(repo)
OLDNEW
« no previous file with comments | « dependencies ('k') | include/AdblockPlus/FilterEngine.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld