Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Delta Between Two Patch Sets: ensure_dependencies.py

Issue 29329056: Issue 3194 - Allow multiple sources for a dependency (Closed)
Left Patch Set: Created Oct. 12, 2015, 3:46 p.m.
Right Patch Set: Just return a list from merge_seqs instead of coercing the result into a tuple Created Oct. 16, 2015, 10:43 a.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
Left: Side by side diff | Download
Right: Side by side diff | Download
« no previous file with change/comment | « no previous file | no next file » | no next file with change/comment »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
LEFTRIGHT
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8 2 # coding: utf-8
3 3
4 # This Source Code Form is subject to the terms of the Mozilla Public 4 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 5 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 7
8 import sys 8 import sys
9 import os 9 import os
10 import posixpath 10 import posixpath
11 import re 11 import re
12 import io 12 import io
13 import errno 13 import errno
14 import logging 14 import logging
15 import subprocess 15 import subprocess
16 import urlparse 16 import urlparse
17 import argparse 17 import argparse
18 18
19 from collections import OrderedDict 19 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 20 from ConfigParser import RawConfigParser
21 21
22 USAGE = """ 22 USAGE = """
23 A dependencies file should look like this: 23 A dependencies file should look like this:
24 24
25 # VCS-specific root URLs for the repositories 25 # VCS-specific root URLs for the repositories
26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ 26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
27 # File to update this script from (optional) 27 # File to update this script from (optional)
28 _self = buildtools/ensure_dependencies.py 28 _self = buildtools/ensure_dependencies.py
29 # Check out elemhidehelper repository into extensions/elemhidehelper directory 29 # Clone elemhidehelper repository into extensions/elemhidehelper directory at
30 # at tag "1.2". 30 # tag "1.2".
31 extensions/elemhidehelper = elemhidehelper 1.2 31 extensions/elemhidehelper = elemhidehelper 1.2
32 # Check out buildtools repository into buildtools directory at VCS-specific 32 # Clone buildtools repository into buildtools directory at VCS-specific
33 # revision IDs. 33 # revision IDs.
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
35 # Clone the adblockplus repository into adblockplus directory, overwriting the
36 # usual source URL for Git repository and specifying VCS specific revision IDs.
37 adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
38 # Clone the adblockpluschrome repository into the adblockpluschrome directory,
39 # from a specific Git repository, specifying the revision ID.
40 adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
35 """ 41 """
36 42
37 SKIP_DEPENDENCY_UPDATES = os.environ.get( 43 SKIP_DEPENDENCY_UPDATES = os.environ.get(
38 "SKIP_DEPENDENCY_UPDATES", "" 44 "SKIP_DEPENDENCY_UPDATES", ""
39 ).lower() not in ("", "0", "false") 45 ).lower() not in ("", "0", "false")
40 46
41 class Mercurial(): 47 class Mercurial():
42 def istype(self, repodir): 48 def istype(self, repodir):
43 return os.path.exists(os.path.join(repodir, ".hg")) 49 return os.path.exists(os.path.join(repodir, ".hg"))
44 50
(...skipping 83 matching lines...) Expand 10 before | Expand all | Expand 10 after
128 # Handle alternative syntax of SSH URLS 134 # Handle alternative syntax of SSH URLS
129 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme: 135 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
130 return "ssh://" + url.replace(":", "/", 1) 136 return "ssh://" + url.replace(":", "/", 1)
131 return url 137 return url
132 138
133 repo_types = OrderedDict(( 139 repo_types = OrderedDict((
134 ("hg", Mercurial()), 140 ("hg", Mercurial()),
135 ("git", Git()), 141 ("git", Git()),
136 )) 142 ))
137 143
# [vcs:]value — an optional repository-type prefix (one of the known
# VCS names, e.g. "hg" or "git") followed by the value itself.
item_regexp = re.compile(
  "^(?:(%s):)?(.+)$" % "|".join(map(re.escape, repo_types.keys())))

# [url@]rev — an optional source URL followed by the mandatory revision.
source_regexp = re.compile("^(?:(.*)@)?(.+)$")
155
def merge_seqs(seq1, seq2):
  """Return a list of any truthy values from the supplied sequences

  Merges the two sequences position by position, preferring the item from
  seq2 when it is truthy and falling back to the item from seq1 otherwise.
  Either argument may be None, which is treated as an empty sequence.

  (None, 2), (1,) => [1, 2]
  None, (1, 2) => [1, 2]
  (1, 2), (3, 4) => [3, 4]
  """
  # NOTE(review): relies on Python 2 map() padding the shorter sequence
  # with None when given multiple sequences (and returning a list); under
  # Python 3, map() stops at the shortest sequence and returns an iterator.
  return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ())
138 def parse_spec(path, line): 165 def parse_spec(path, line):
139 if "=" not in line: 166 if "=" not in line:
140 logging.warning("Invalid line in file %s: %s" % (path, line)) 167 logging.warning("Invalid line in file %s: %s" % (path, line))
141 return None, None 168 return None, None
142 169
143 key, value = line.split("=", 1) 170 key, value = line.split("=", 1)
144 key = key.strip() 171 key = key.strip()
145 items = value.split() 172 items = value.split()
146 if not len(items): 173 if not len(items):
147 logging.warning("No value specified for key %s in file %s" % (key, path)) 174 logging.warning("No value specified for key %s in file %s" % (key, path))
148 return key, None 175 return key, None
149 176
150 result = OrderedDict() 177 result = OrderedDict()
151 if not key.startswith("_"): 178 is_dependency_field = not key.startswith("_")
152 result["_source"] = items.pop(0) 179
153 180 for i, item in enumerate(items):
154 for item in items: 181 try:
155 if ":" in item: 182 vcs, value = re.search(item_regexp, item).groups()
156 type, value = item.split(":", 1) 183 vcs = vcs or "*"
157 else: 184 if is_dependency_field:
158 type, value = ("*", item) 185 if i == 0 and vcs == "*":
159 if type in result: 186 # In order to be backwards compatible we have to assume that the first
160 logging.warning("Ignoring duplicate value for type %s (key %s in file %s)" % (type, key, path)) 187 # source contains only a URL/path for the repo if it does not contain
161 else: 188 # the VCS part
162 result[type] = value 189 url_rev = (value, None)
190 else:
191 url_rev = re.search(source_regexp, value).groups()
192 result[vcs] = merge_seqs(result.get(vcs), url_rev)
193 else:
194 if vcs in result:
195 logging.warning("Ignoring duplicate value for type %r "
196 "(key %r in file %r)" % (vcs, key, path))
197 result[vcs] = value
198 except AttributeError:
199 logging.warning("Ignoring invalid item %r for type %r "
200 "(key %r in file %r)" % (item, vcs, key, path))
201 continue
163 return key, result 202 return key, result
164 203
165 def read_deps(repodir): 204 def read_deps(repodir):
166 parenttype = get_repo_type(repodir)
167 result = {} 205 result = {}
168 deps_path = os.path.join(repodir, "dependencies") 206 deps_path = os.path.join(repodir, "dependencies")
169 try: 207 try:
170 with io.open(deps_path, "rt", encoding="utf-8") as handle: 208 with io.open(deps_path, "rt", encoding="utf-8") as handle:
171 for line in handle: 209 for line in handle:
172 # Remove comments and whitespace 210 # Remove comments and whitespace
173 line = re.sub(r"#.*", "", line).strip() 211 line = re.sub(r"#.*", "", line).strip()
174 if not line: 212 if not line:
175 continue 213 continue
176 214
177 key, spec = parse_spec(deps_path, line) 215 key, spec = parse_spec(deps_path, line)
178 if spec and (key not in result or parenttype in spec.keys()): 216 if spec:
179 result[key] = spec 217 result[key] = spec
180 return result 218 return result
181 except IOError, e: 219 except IOError, e:
182 if e.errno != errno.ENOENT: 220 if e.errno != errno.ENOENT:
183 raise 221 raise
184 return None 222 return None
185 223
186 def safe_join(path, subpath): 224 def safe_join(path, subpath):
187 # This has been inspired by Flask's safe_join() function 225 # This has been inspired by Flask's safe_join() function
188 forbidden = {os.sep, os.altsep} - {posixpath.sep, None} 226 forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
189 if any(sep in subpath for sep in forbidden): 227 if any(sep in subpath for sep in forbidden):
190 raise Exception("Illegal directory separator in dependency path %s" % subpath) 228 raise Exception("Illegal directory separator in dependency path %s" % subpath)
191 229
192 normpath = posixpath.normpath(subpath) 230 normpath = posixpath.normpath(subpath)
193 if posixpath.isabs(normpath): 231 if posixpath.isabs(normpath):
194 raise Exception("Dependency path %s cannot be absolute" % subpath) 232 raise Exception("Dependency path %s cannot be absolute" % subpath)
195 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): 233 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
196 raise Exception("Dependency path %s has to be inside the repository" % subpath) 234 raise Exception("Dependency path %s has to be inside the repository" % subpath)
197 return os.path.join(path, *normpath.split(posixpath.sep)) 235 return os.path.join(path, *normpath.split(posixpath.sep))
198 236
199 def get_repo_type(repo): 237 def get_repo_type(repo):
200 for name, repotype in repo_types.iteritems(): 238 for name, repotype in repo_types.iteritems():
201 if repotype.istype(repo): 239 if repotype.istype(repo):
202 return name 240 return name
203 return None 241 return None
204 242
205 def ensure_repo(parentrepo, target, roots, sourcename): 243 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
206 if os.path.exists(target): 244 if os.path.exists(target):
207 return 245 return
208 246
209 if SKIP_DEPENDENCY_UPDATES: 247 if SKIP_DEPENDENCY_UPDATES:
210 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " 248 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
211 "%s not cloned", target) 249 "%s not cloned", target)
212 return 250 return
213 251
214 parenttype = get_repo_type(parentrepo)
215 type = None
216 for key in roots:
217 if key == parenttype or (key in repo_types and type is None):
218 type = key
219 if type is None:
220 raise Exception("No valid source found to create %s" % target)
221
222 postprocess_url = repo_types[type].postprocess_url 252 postprocess_url = repo_types[type].postprocess_url
223 root = postprocess_url(roots[type]) 253 root = postprocess_url(root)
224 sourcename = postprocess_url(sourcename) 254 sourcename = postprocess_url(sourcename)
225 255
226 if os.path.exists(root): 256 if os.path.exists(root):
227 url = os.path.join(root, sourcename) 257 url = os.path.join(root, sourcename)
228 else: 258 else:
229 url = urlparse.urljoin(root, sourcename) 259 url = urlparse.urljoin(root, sourcename)
230 260
231 logging.info("Cloning repository %s into %s" % (url, target)) 261 logging.info("Cloning repository %s into %s" % (url, target))
232 repo_types[type].clone(url, target) 262 repo_types[type].clone(url, target)
233 263 repo_types[parenttype].ignore(target, parentrepo)
234 for repo in repo_types.itervalues(): 264
235 if repo.istype(parentrepo): 265 def update_repo(target, type, revision):
236 repo.ignore(target, parentrepo)
237
238 def update_repo(target, revisions):
239 type = get_repo_type(target)
240 if type is None:
241 logging.warning("Type of repository %s unknown, skipping update" % target)
242 return
243
244 if type in revisions:
245 revision = revisions[type]
246 elif "*" in revisions:
247 revision = revisions["*"]
248 else:
249 logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type))
250 return
251
252 resolved_revision = repo_types[type].get_revision_id(target, revision) 266 resolved_revision = repo_types[type].get_revision_id(target, revision)
253 current_revision = repo_types[type].get_revision_id(target) 267 current_revision = repo_types[type].get_revision_id(target)
254 268
255 if resolved_revision != current_revision: 269 if resolved_revision != current_revision:
256 if SKIP_DEPENDENCY_UPDATES: 270 if SKIP_DEPENDENCY_UPDATES:
257 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " 271 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
258 "%s not checked out to %s", target, revision) 272 "%s not checked out to %s", target, revision)
259 return 273 return
260 274
261 if not resolved_revision: 275 if not resolved_revision:
(...skipping 12 matching lines...) Expand all
274 if level == 0: 288 if level == 0:
275 logging.warning("No dependencies file in directory %s, nothing to do...\n% s" % (repodir, USAGE)) 289 logging.warning("No dependencies file in directory %s, nothing to do...\n% s" % (repodir, USAGE))
276 return 290 return
277 if level >= 10: 291 if level >= 10:
278 logging.warning("Too much subrepository nesting, ignoring %s" % repo) 292 logging.warning("Too much subrepository nesting, ignoring %s" % repo)
279 return 293 return
280 294
281 if overrideroots is not None: 295 if overrideroots is not None:
282 config["_root"] = overrideroots 296 config["_root"] = overrideroots
283 297
284 for dir, revisions in config.iteritems(): 298 for dir, sources in config.iteritems():
285 if dir.startswith("_") or revisions["_source"] in skipdependencies: 299 if (dir.startswith("_") or
300 skipdependencies.intersection([s[0] for s in sources if s[0]])):
286 continue 301 continue
302
287 target = safe_join(repodir, dir) 303 target = safe_join(repodir, dir)
288 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) 304 parenttype = get_repo_type(repodir)
289 update_repo(target, revisions) 305 _root = config.get("_root", {})
290 resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroo ts, skipdependencies=skipdependencies) 306
307 for key in sources.keys() + _root.keys():
308 if key == parenttype or key is None and vcs != "*":
309 vcs = key
310 source, rev = merge_seqs(sources.get("*"), sources.get(vcs))
311
312 if not (vcs and source and rev):
313 logging.warning("No valid source / revision found to create %s" % target)
314 continue
315
316 ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
317 update_repo(target, vcs, rev)
318 resolve_deps(target, level + 1, self_update=False,
319 overrideroots=overrideroots, skipdependencies=skipdependencies)
291 320
292 if self_update and "_self" in config and "*" in config["_self"]: 321 if self_update and "_self" in config and "*" in config["_self"]:
293 source = safe_join(repodir, config["_self"]["*"]) 322 source = safe_join(repodir, config["_self"]["*"])
294 try: 323 try:
295 with io.open(source, "rb") as handle: 324 with io.open(source, "rb") as handle:
296 sourcedata = handle.read() 325 sourcedata = handle.read()
297 except IOError, e: 326 except IOError, e:
298 if e.errno != errno.ENOENT: 327 if e.errno != errno.ENOENT:
299 raise 328 raise
300 logging.warning("File %s doesn't exist, skipping self-update" % source) 329 logging.warning("File %s doesn't exist, skipping self-update" % source)
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
333 args = parser.parse_args() 362 args = parser.parse_args()
334 363
335 if args.quiet: 364 if args.quiet:
336 logging.disable(logging.INFO) 365 logging.disable(logging.INFO)
337 366
338 repos = args.repos 367 repos = args.repos
339 if not len(repos): 368 if not len(repos):
340 repos = [os.path.dirname(__file__)] 369 repos = [os.path.dirname(__file__)]
341 for repo in repos: 370 for repo in repos:
342 resolve_deps(repo) 371 resolve_deps(repo)
LEFTRIGHT
« no previous file | no next file » | Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Toggle Comments ('s')

Powered by Google App Engine
This is Rietveld