| Index: ensure_dependencies.py | 
| =================================================================== | 
| --- a/ensure_dependencies.py | 
| +++ b/ensure_dependencies.py | 
| @@ -9,16 +9,17 @@ import sys | 
| import os | 
| import posixpath | 
| import re | 
| import io | 
| import errno | 
| import logging | 
| import subprocess | 
| import urlparse | 
| +import argparse | 
|  | 
| from collections import OrderedDict | 
| from ConfigParser import RawConfigParser | 
|  | 
| USAGE = """ | 
| A dependencies file should look like this: | 
|  | 
| # VCS-specific root URLs for the repositories | 
| @@ -28,16 +29,20 @@ A dependencies file should look like thi | 
| # Check out elemhidehelper repository into extensions/elemhidehelper directory | 
| # at tag "1.2". | 
| extensions/elemhidehelper = elemhidehelper 1.2 | 
| # Check out buildtools repository into buildtools directory at VCS-specific | 
| # revision IDs. | 
| buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 | 
| """ | 
|  | 
| +SKIP_DEPENDENCY_UPDATES = os.environ.get( | 
| +  "SKIP_DEPENDENCY_UPDATES", "" | 
| +).lower() not in ("", "0", "false") | 
| + | 
| class Mercurial(): | 
| def istype(self, repodir): | 
| return os.path.exists(os.path.join(repodir, ".hg")) | 
|  | 
| def clone(self, source, target): | 
| if not source.endswith("/"): | 
| source += "/" | 
| subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target]) | 
| @@ -73,41 +78,63 @@ class Mercurial(): | 
|  | 
| config.set("ui", "ignore.dependencies", ignore_path) | 
| with open(config_path, "w") as stream: | 
| config.write(stream) | 
|  | 
| module = os.path.relpath(target, repo) | 
| _ensure_line_exists(ignore_path, module) | 
|  | 
| +  def postprocess_url(self, url): | 
| +    return url | 
| + | 
| class Git(): | 
| def istype(self, repodir): | 
| return os.path.exists(os.path.join(repodir, ".git")) | 
|  | 
| def clone(self, source, target): | 
| source = source.rstrip("/") | 
| if not source.endswith(".git"): | 
| source += ".git" | 
| subprocess.check_call(["git", "clone", "--quiet", source, target]) | 
|  | 
| def get_revision_id(self, repo, rev="HEAD"): | 
| command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] | 
| return subprocess.check_output(command, cwd=repo).strip() | 
|  | 
| def pull(self, repo): | 
| +    # Fetch tracked branches, new tags and the list of available remote branches | 
| subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo) | 
| +    # Next we need to ensure all remote branches are tracked | 
| +    newly_tracked = False | 
| +    remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo) | 
| +    for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M): | 
| +      remote, local = match.groups() | 
| +      with open(os.devnull, "wb") as devnull: | 
| +        if subprocess.call(["git", "branch", "--track", local, remote], | 
| +                           cwd=repo, stdout=devnull, stderr=devnull) == 0: | 
| +          newly_tracked = True | 
| +    # Finally fetch any newly tracked remote branches | 
| +    if newly_tracked: | 
| +      subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo) | 
|  | 
| def update(self, repo, rev): | 
| subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo) | 
|  | 
| def ignore(self, target, repo): | 
| module = os.path.relpath(target, repo) | 
| exclude_file = os.path.join(repo, ".git", "info", "exclude") | 
| _ensure_line_exists(exclude_file, module) | 
|  | 
| +  def postprocess_url(self, url): | 
| +    # Handle alternative syntax of SSH URLs | 
| +    if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme: | 
| +      return "ssh://" + url.replace(":", "/", 1) | 
| +    return url | 
| + | 
| repo_types = OrderedDict(( | 
| ("hg", Mercurial()), | 
| ("git", Git()), | 
| )) | 
|  | 
| def parse_spec(path, line): | 
| if "=" not in line: | 
| logging.warning("Invalid line in file %s: %s" % (path, line)) | 
| @@ -152,17 +179,17 @@ def read_deps(repodir): | 
| return result | 
| except IOError, e: | 
| if e.errno != errno.ENOENT: | 
| raise | 
| return None | 
|  | 
| def safe_join(path, subpath): | 
| # This has been inspired by Flask's safe_join() function | 
| -  forbidden = set([os.sep, os.altsep]) - set([posixpath.sep, None]) | 
| +  forbidden = {os.sep, os.altsep} - {posixpath.sep, None} | 
| if any(sep in subpath for sep in forbidden): | 
| raise Exception("Illegal directory separator in dependency path %s" % subpath) | 
|  | 
| normpath = posixpath.normpath(subpath) | 
| if posixpath.isabs(normpath): | 
| raise Exception("Dependency path %s cannot be absolute" % subpath) | 
| if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): | 
| raise Exception("Dependency path %s has to be inside the repository" % subpath) | 
| @@ -173,25 +200,38 @@ def get_repo_type(repo): | 
| if repotype.istype(repo): | 
| return name | 
| return None | 
|  | 
| def ensure_repo(parentrepo, target, roots, sourcename): | 
| if os.path.exists(target): | 
| return | 
|  | 
| +  if SKIP_DEPENDENCY_UPDATES: | 
| +    logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " | 
| +                    "%s not cloned", target) | 
| +    return | 
| + | 
| parenttype = get_repo_type(parentrepo) | 
| type = None | 
| for key in roots: | 
| if key == parenttype or (key in repo_types and type is None): | 
| type = key | 
| if type is None: | 
| raise Exception("No valid source found to create %s" % target) | 
|  | 
| -  url = urlparse.urljoin(roots[type], sourcename) | 
| +  postprocess_url = repo_types[type].postprocess_url | 
| +  root = postprocess_url(roots[type]) | 
| +  sourcename = postprocess_url(sourcename) | 
| + | 
| +  if os.path.exists(root): | 
| +    url = os.path.join(root, sourcename) | 
| +  else: | 
| +    url = urlparse.urljoin(root, sourcename) | 
| + | 
| logging.info("Cloning repository %s into %s" % (url, target)) | 
| repo_types[type].clone(url, target) | 
|  | 
| for repo in repo_types.itervalues(): | 
| if repo.istype(parentrepo): | 
| repo.ignore(target, parentrepo) | 
|  | 
| def update_repo(target, revisions): | 
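|  | 
| With the postprocessed root, ensure_repo() now builds the clone URL with os.path.join() when the root exists as a local path and falls back to urlparse.urljoin() otherwise; note that urljoin() only appends the source name when the root ends with a slash. URLs below are made up for illustration: | 
|  | 
| urlparse.urljoin("https://hg.example.com/repos/", "buildtools")  # -> "https://hg.example.com/repos/buildtools" | 
| urlparse.urljoin("https://hg.example.com/repos", "buildtools")   # -> "https://hg.example.com/buildtools" | 
| os.path.join("/home/user/mirrors", "buildtools")                 # -> "/home/user/mirrors/buildtools" | 
|  | 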
| @@ -204,36 +244,43 @@ def update_repo(target, revisions): | 
| revision = revisions[type] | 
| elif "*" in revisions: | 
| revision = revisions["*"] | 
| else: | 
| logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type)) | 
| return | 
|  | 
| resolved_revision = repo_types[type].get_revision_id(target, revision) | 
| -  if not resolved_revision: | 
| -    logging.info("Revision %s is unknown, downloading remote changes" % revision) | 
| -    repo_types[type].pull(target) | 
| -    resolved_revision = repo_types[type].get_revision_id(target, revision) | 
| +  current_revision = repo_types[type].get_revision_id(target) | 
| + | 
| +  if resolved_revision != current_revision: | 
| +    if SKIP_DEPENDENCY_UPDATES: | 
| +      logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " | 
| +                      "%s not checked out to %s", target, revision) | 
| +      return | 
| + | 
| if not resolved_revision: | 
| -      raise Exception("Failed to resolve revision %s" % revision) | 
| +      logging.info("Revision %s is unknown, downloading remote changes" % revision) | 
| +      repo_types[type].pull(target) | 
| +      resolved_revision = repo_types[type].get_revision_id(target, revision) | 
| +      if not resolved_revision: | 
| +        raise Exception("Failed to resolve revision %s" % revision) | 
|  | 
| -  current_revision = repo_types[type].get_revision_id(target) | 
| -  if resolved_revision != current_revision: | 
| logging.info("Updating repository %s to revision %s" % (target, resolved_revision)) | 
| repo_types[type].update(target, resolved_revision) | 
|  | 
| def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): | 
| config = read_deps(repodir) | 
| if config is None: | 
| if level == 0: | 
| logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE)) | 
| return | 
| if level >= 10: | 
| logging.warning("Too much subrepository nesting, ignoring %s" % repo) | 
| +    return | 
|  | 
| if overrideroots is not None: | 
| config["_root"] = overrideroots | 
|  | 
| for dir, revisions in config.iteritems(): | 
| if dir.startswith("_") or revisions["_source"] in skipdependencies: | 
| continue | 
| target = safe_join(repodir, dir) | 
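|  | 
| update_repo() receives the revision mapping parsed from the dependencies file, keyed by VCS type with "*" as the VCS-agnostic fallback; with the reordering above, a remote pull only happens when the requested revision cannot be resolved locally and the working copy is not already on it. Hypothetical calls, revisions taken from the USAGE example: | 
|  | 
| update_repo("buildtools", {"hg": "016d16f7137b", "git": "f3f8692f82e5"}) | 
| update_repo("extensions/elemhidehelper", {"*": "1.2"}) | 
|  | 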
| @@ -273,13 +320,22 @@ def _ensure_line_exists(path, pattern): | 
| file_content.append(pattern) | 
| f.seek(0, os.SEEK_SET) | 
| f.truncate() | 
| for l in file_content: | 
| print >>f, l | 
|  | 
| if __name__ == "__main__": | 
| logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) | 
| -  repos = sys.argv[1:] | 
| + | 
| +  parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.") | 
| +  parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path") | 
| +  parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output") | 
| +  args = parser.parse_args() | 
| + | 
| +  if args.quiet: | 
| +    logging.disable(logging.INFO) | 
| + | 
| +  repos = args.repos | 
| if not len(repos): | 
| -    repos = [os.getcwd()] | 
| +    repos = [os.path.dirname(__file__)] | 
| for repo in repos: | 
| resolve_deps(repo) | 
|  |
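|  | 
| The argparse change keeps the old calling convention (zero or more repository paths) but adds -q/--quiet, which drops INFO output via logging.disable(), and changes the default repository from the current working directory to the directory containing the script. Equivalent programmatic use, with a placeholder path: | 
|  | 
| import logging | 
| import ensure_dependencies | 
|  | 
| logging.disable(logging.INFO)                    # same effect as --quiet | 
| ensure_dependencies.resolve_deps("adblockplus")  # like: python ensure_dependencies.py -q adblockplus | 
|  | 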