| OLD | NEW | 
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python | 
| 2 # coding: utf-8 | 2 # coding: utf-8 | 
| 3 | 3 | 
| 4 # This Source Code Form is subject to the terms of the Mozilla Public | 4 # This Source Code Form is subject to the terms of the Mozilla Public | 
| 5 # License, v. 2.0. If a copy of the MPL was not distributed with this | 5 # License, v. 2.0. If a copy of the MPL was not distributed with this | 
| 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 
| 7 | 7 | 
| 8 import sys | 8 import sys | 
| 9 import os | 9 import os | 
| 10 import posixpath | 10 import posixpath | 
| 11 import re | 11 import re | 
| 12 import io | 12 import io | 
| 13 import errno | 13 import errno | 
| 14 import logging | 14 import logging | 
| 15 import subprocess | 15 import subprocess | 
| 16 import urlparse | 16 import urlparse | 
|  | 17 import argparse | 
| 17 | 18 | 
| 18 from collections import OrderedDict | 19 from collections import OrderedDict | 
| 19 from ConfigParser import RawConfigParser | 20 from ConfigParser import RawConfigParser | 
| 20 | 21 | 
| 21 USAGE = """ | 22 USAGE = """ | 
| 22 A dependencies file should look like this: | 23 A dependencies file should look like this: | 
| 23 | 24 | 
| 24   # VCS-specific root URLs for the repositories | 25   # VCS-specific root URLs for the repositories | 
| 25   _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ | 26   _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ | 
| 26   # File to update this script from (optional) | 27   # File to update this script from (optional) | 
| 27   _self = buildtools/ensure_dependencies.py | 28   _self = buildtools/ensure_dependencies.py | 
| 28   # Check out elemhidehelper repository into extensions/elemhidehelper directory | 29   # Check out elemhidehelper repository into extensions/elemhidehelper directory | 
| 29   # at tag "1.2". | 30   # at tag "1.2". | 
| 30   extensions/elemhidehelper = elemhidehelper 1.2 | 31   extensions/elemhidehelper = elemhidehelper 1.2 | 
| 31   # Check out buildtools repository into buildtools directory at VCS-specific | 32   # Check out buildtools repository into buildtools directory at VCS-specific | 
| 32   # revision IDs. | 33   # revision IDs. | 
| 33   buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 | 34   buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 | 
| 34 """ | 35 """ | 
| 35 | 36 | 
|  | 37 SKIP_DEPENDENCY_UPDATES = os.environ.get( | 
|  | 38   "SKIP_DEPENDENCY_UPDATES", "" | 
|  | 39 ).lower() not in ("", "0", "false") | 
|  | 40 | 
| 36 class Mercurial(): | 41 class Mercurial(): | 
| 37   def istype(self, repodir): | 42   def istype(self, repodir): | 
| 38     return os.path.exists(os.path.join(repodir, ".hg")) | 43     return os.path.exists(os.path.join(repodir, ".hg")) | 
| 39 | 44 | 
| 40   def clone(self, source, target): | 45   def clone(self, source, target): | 
| 41     if not source.endswith("/"): | 46     if not source.endswith("/"): | 
| 42       source += "/" | 47       source += "/" | 
| 43     subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target]) | 48     subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target]) |
| 44 | 49 | 
| 45   def get_revision_id(self, repo, rev=None): | 50   def get_revision_id(self, repo, rev=None): | 
| (...skipping 25 matching lines...) |
| 71       if not config.has_section("ui"): | 76       if not config.has_section("ui"): | 
| 72         config.add_section("ui") | 77         config.add_section("ui") | 
| 73 | 78 | 
| 74       config.set("ui", "ignore.dependencies", ignore_path) | 79       config.set("ui", "ignore.dependencies", ignore_path) | 
| 75       with open(config_path, "w") as stream: | 80       with open(config_path, "w") as stream: | 
| 76         config.write(stream) | 81         config.write(stream) | 
| 77 | 82 | 
| 78       module = os.path.relpath(target, repo) | 83       module = os.path.relpath(target, repo) | 
| 79       _ensure_line_exists(ignore_path, module) | 84       _ensure_line_exists(ignore_path, module) | 
| 80 | 85 | 
|  | 86   def postprocess_url(self, url): | 
|  | 87     return url | 
|  | 88 | 
| 81 class Git(): | 89 class Git(): | 
| 82   def istype(self, repodir): | 90   def istype(self, repodir): | 
| 83     return os.path.exists(os.path.join(repodir, ".git")) | 91     return os.path.exists(os.path.join(repodir, ".git")) | 
| 84 | 92 | 
| 85   def clone(self, source, target): | 93   def clone(self, source, target): | 
| 86     source = source.rstrip("/") | 94     source = source.rstrip("/") | 
| 87     if not source.endswith(".git"): | 95     if not source.endswith(".git"): | 
| 88       source += ".git" | 96       source += ".git" | 
| 89     subprocess.check_call(["git", "clone", "--quiet", source, target]) | 97     subprocess.check_call(["git", "clone", "--quiet", source, target]) | 
| 90 | 98 | 
| 91   def get_revision_id(self, repo, rev="HEAD"): | 99   def get_revision_id(self, repo, rev="HEAD"): | 
| 92     command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] | 100     command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] | 
| 93     return subprocess.check_output(command, cwd=repo).strip() | 101     return subprocess.check_output(command, cwd=repo).strip() | 
| 94 | 102 | 
| 95   def pull(self, repo): | 103   def pull(self, repo): | 
|  | 104     # Fetch tracked branches, new tags and the list of available remote branches | 
| 96     subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo) | 105     subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo) |
|  | 106     # Next we need to ensure all remote branches are tracked | 
|  | 107     newly_tracked = False | 
|  | 108     remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo) | 
|  | 109     for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M): | 
|  | 110       remote, local = match.groups() | 
|  | 111       with open(os.devnull, "wb") as devnull: | 
|  | 112         if subprocess.call(["git", "branch", "--track", local, remote], | 
|  | 113                            cwd=repo, stdout=devnull, stderr=devnull) == 0: | 
|  | 114           newly_tracked = True | 
|  | 115     # Finally fetch any newly tracked remote branches | 
|  | 116     if newly_tracked: | 
|  | 117       subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo) | 
| 97 | 118 | 
| 98   def update(self, repo, rev): | 119   def update(self, repo, rev): | 
| 99     subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo) | 120     subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo) | 
| 100 | 121 | 
| 101   def ignore(self, target, repo): | 122   def ignore(self, target, repo): | 
| 102     module = os.path.relpath(target, repo) | 123     module = os.path.relpath(target, repo) | 
| 103     exclude_file = os.path.join(repo, ".git", "info", "exclude") | 124     exclude_file = os.path.join(repo, ".git", "info", "exclude") | 
| 104     _ensure_line_exists(exclude_file, module) | 125     _ensure_line_exists(exclude_file, module) | 
| 105 | 126 | 
|  | 127   def postprocess_url(self, url): | 
|  | 128     # Handle alternative syntax of SSH URLS | 
|  | 129     if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme: | 
|  | 130       return "ssh://" + url.replace(":", "/", 1) | 
|  | 131     return url | 
|  | 132 | 
| 106 repo_types = OrderedDict(( | 133 repo_types = OrderedDict(( | 
| 107   ("hg", Mercurial()), | 134   ("hg", Mercurial()), | 
| 108   ("git", Git()), | 135   ("git", Git()), | 
| 109 )) | 136 )) | 
| 110 | 137 | 
| 111 def parse_spec(path, line): | 138 def parse_spec(path, line): | 
| 112   if "=" not in line: | 139   if "=" not in line: | 
| 113     logging.warning("Invalid line in file %s: %s" % (path, line)) | 140     logging.warning("Invalid line in file %s: %s" % (path, line)) | 
| 114     return None, None | 141     return None, None | 
| 115 | 142 | 
| (...skipping 34 matching lines...) |
| 150         if spec: | 177         if spec: | 
| 151           result[key] = spec | 178           result[key] = spec | 
| 152     return result | 179     return result | 
| 153   except IOError, e: | 180   except IOError, e: | 
| 154     if e.errno != errno.ENOENT: | 181     if e.errno != errno.ENOENT: | 
| 155       raise | 182       raise | 
| 156     return None | 183     return None | 
| 157 | 184 | 
| 158 def safe_join(path, subpath): | 185 def safe_join(path, subpath): | 
| 159   # This has been inspired by Flask's safe_join() function | 186   # This has been inspired by Flask's safe_join() function | 
| 160   forbidden = set([os.sep, os.altsep]) - set([posixpath.sep, None]) | 187   forbidden = {os.sep, os.altsep} - {posixpath.sep, None} | 
| 161   if any(sep in subpath for sep in forbidden): | 188   if any(sep in subpath for sep in forbidden): | 
| 162     raise Exception("Illegal directory separator in dependency path %s" % subpat
     h) | 189     raise Exception("Illegal directory separator in dependency path %s" % subpat
     h) | 
| 163 | 190 | 
| 164   normpath = posixpath.normpath(subpath) | 191   normpath = posixpath.normpath(subpath) | 
| 165   if posixpath.isabs(normpath): | 192   if posixpath.isabs(normpath): | 
| 166     raise Exception("Dependency path %s cannot be absolute" % subpath) | 193     raise Exception("Dependency path %s cannot be absolute" % subpath) | 
| 167   if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): | 194   if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): |
| 168     raise Exception("Dependency path %s has to be inside the repository" % subpa
     th) | 195     raise Exception("Dependency path %s has to be inside the repository" % subpa
     th) | 
| 169   return os.path.join(path, *normpath.split(posixpath.sep)) | 196   return os.path.join(path, *normpath.split(posixpath.sep)) | 
| 170 | 197 | 
| 171 def get_repo_type(repo): | 198 def get_repo_type(repo): | 
| 172   for name, repotype in repo_types.iteritems(): | 199   for name, repotype in repo_types.iteritems(): | 
| 173     if repotype.istype(repo): | 200     if repotype.istype(repo): | 
| 174       return name | 201       return name | 
| 175   return None | 202   return None | 
| 176 | 203 | 
| 177 def ensure_repo(parentrepo, target, roots, sourcename): | 204 def ensure_repo(parentrepo, target, roots, sourcename): | 
| 178   if os.path.exists(target): | 205   if os.path.exists(target): | 
| 179     return | 206     return | 
| 180 | 207 | 
|  | 208   if SKIP_DEPENDENCY_UPDATES: | 
|  | 209     logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " | 
|  | 210                     "%s not cloned", target) | 
|  | 211     return | 
|  | 212 | 
| 181   parenttype = get_repo_type(parentrepo) | 213   parenttype = get_repo_type(parentrepo) | 
| 182   type = None | 214   type = None | 
| 183   for key in roots: | 215   for key in roots: | 
| 184     if key == parenttype or (key in repo_types and type is None): | 216     if key == parenttype or (key in repo_types and type is None): | 
| 185       type = key | 217       type = key | 
| 186   if type is None: | 218   if type is None: | 
| 187     raise Exception("No valid source found to create %s" % target) | 219     raise Exception("No valid source found to create %s" % target) | 
| 188 | 220 | 
| 189   url = urlparse.urljoin(roots[type], sourcename) | 221   postprocess_url = repo_types[type].postprocess_url | 
|  | 222   root = postprocess_url(roots[type]) | 
|  | 223   sourcename = postprocess_url(sourcename) | 
|  | 224 | 
|  | 225   if os.path.exists(root): | 
|  | 226     url = os.path.join(root, sourcename) | 
|  | 227   else: | 
|  | 228     url = urlparse.urljoin(root, sourcename) | 
|  | 229 | 
| 190   logging.info("Cloning repository %s into %s" % (url, target)) | 230   logging.info("Cloning repository %s into %s" % (url, target)) | 
| 191   repo_types[type].clone(url, target) | 231   repo_types[type].clone(url, target) | 
| 192 | 232 | 
| 193   for repo in repo_types.itervalues(): | 233   for repo in repo_types.itervalues(): | 
| 194     if repo.istype(parentrepo): | 234     if repo.istype(parentrepo): | 
| 195       repo.ignore(target, parentrepo) | 235       repo.ignore(target, parentrepo) | 
| 196 | 236 | 
| 197 def update_repo(target, revisions): | 237 def update_repo(target, revisions): | 
| 198   type = get_repo_type(target) | 238   type = get_repo_type(target) | 
| 199   if type is None: | 239   if type is None: | 
| 200     logging.warning("Type of repository %s unknown, skipping update" % target) | 240     logging.warning("Type of repository %s unknown, skipping update" % target) | 
| 201     return | 241     return | 
| 202 | 242 | 
| 203   if type in revisions: | 243   if type in revisions: | 
| 204     revision = revisions[type] | 244     revision = revisions[type] | 
| 205   elif "*" in revisions: | 245   elif "*" in revisions: | 
| 206     revision = revisions["*"] | 246     revision = revisions["*"] | 
| 207   else: | 247   else: | 
| 208     logging.warning("No revision specified for repository %s (type %s), skipping
      update" % (target, type)) | 248     logging.warning("No revision specified for repository %s (type %s), skipping
      update" % (target, type)) | 
| 209     return | 249     return | 
| 210 | 250 | 
| 211   resolved_revision = repo_types[type].get_revision_id(target, revision) | 251   resolved_revision = repo_types[type].get_revision_id(target, revision) | 
| 212   if not resolved_revision: | 252   current_revision = repo_types[type].get_revision_id(target) | 
| 213     logging.info("Revision %s is unknown, downloading remote changes" % revision
     ) | 253 | 
| 214     repo_types[type].pull(target) | 254   if resolved_revision != current_revision: | 
| 215     resolved_revision = repo_types[type].get_revision_id(target, revision) | 255     if SKIP_DEPENDENCY_UPDATES: | 
|  | 256       logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " | 
|  | 257                       "%s not checked out to %s", target, revision) | 
|  | 258       return | 
|  | 259 | 
| 216     if not resolved_revision: | 260     if not resolved_revision: | 
| 217       raise Exception("Failed to resolve revision %s" % revision) | 261       logging.info("Revision %s is unknown, downloading remote changes" % revisi
     on) | 
|  | 262       repo_types[type].pull(target) | 
|  | 263       resolved_revision = repo_types[type].get_revision_id(target, revision) | 
|  | 264       if not resolved_revision: | 
|  | 265         raise Exception("Failed to resolve revision %s" % revision) | 
| 218 | 266 | 
| 219   current_revision = repo_types[type].get_revision_id(target) |  | 
| 220   if resolved_revision != current_revision: |  | 
| 221     logging.info("Updating repository %s to revision %s" % (target, resolved_rev
     ision)) | 267     logging.info("Updating repository %s to revision %s" % (target, resolved_rev
     ision)) | 
| 222     repo_types[type].update(target, resolved_revision) | 268     repo_types[type].update(target, resolved_revision) | 
| 223 | 269 | 
| 224 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): | 270 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): |
| 225   config = read_deps(repodir) | 271   config = read_deps(repodir) | 
| 226   if config is None: | 272   if config is None: | 
| 227     if level == 0: | 273     if level == 0: | 
| 228       logging.warning("No dependencies file in directory %s, nothing to do...\n%
     s" % (repodir, USAGE)) | 274       logging.warning("No dependencies file in directory %s, nothing to do...\n%
     s" % (repodir, USAGE)) | 
| 229     return | 275     return | 
| 230   if level >= 10: | 276   if level >= 10: | 
| 231     logging.warning("Too much subrepository nesting, ignoring %s" % repo) | 277     logging.warning("Too much subrepository nesting, ignoring %s" % repo) | 
|  | 278     return | 
| 232 | 279 | 
| 233   if overrideroots is not None: | 280   if overrideroots is not None: | 
| 234     config["_root"] = overrideroots | 281     config["_root"] = overrideroots | 
| 235 | 282 | 
| 236   for dir, revisions in config.iteritems(): | 283   for dir, revisions in config.iteritems(): | 
| 237     if dir.startswith("_") or revisions["_source"] in skipdependencies: | 284     if dir.startswith("_") or revisions["_source"] in skipdependencies: | 
| 238       continue | 285       continue | 
| 239     target = safe_join(repodir, dir) | 286     target = safe_join(repodir, dir) | 
| 240     ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) | 287     ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) | 
| 241     update_repo(target, revisions) | 288     update_repo(target, revisions) | 
| (...skipping 29 matching lines...) |
| 271     file_content = [l.strip() for l in f.readlines()] | 318     file_content = [l.strip() for l in f.readlines()] | 
| 272     if not pattern in file_content: | 319     if not pattern in file_content: | 
| 273       file_content.append(pattern) | 320       file_content.append(pattern) | 
| 274       f.seek(0, os.SEEK_SET) | 321       f.seek(0, os.SEEK_SET) | 
| 275       f.truncate() | 322       f.truncate() | 
| 276       for l in file_content: | 323       for l in file_content: | 
| 277         print >>f, l | 324         print >>f, l | 
| 278 | 325 | 
| 279 if __name__ == "__main__": | 326 if __name__ == "__main__": | 
| 280   logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) | 327   logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) | 
| 281   repos = sys.argv[1:] | 328 | 
|  | 329   parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.") |
|  | 330   parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="
     Repository path") | 
|  | 331   parser.add_argument("-q", "--quiet", action="store_true", help="Suppress infor
     mational output") | 
|  | 332   args = parser.parse_args() | 
|  | 333 | 
|  | 334   if args.quiet: | 
|  | 335     logging.disable(logging.INFO) | 
|  | 336 | 
|  | 337   repos = args.repos | 
| 282   if not len(repos): | 338   if not len(repos): | 
| 283     repos = [os.getcwd()] | 339     repos = [os.path.dirname(__file__)] | 
| 284   for repo in repos: | 340   for repo in repos: | 
| 285     resolve_deps(repo) | 341     resolve_deps(repo) | 
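
A few notes on the behaviour added above, with small standalone sketches; the helper names and sample values are illustrative, not part of the patch. First, the new `SKIP_DEPENDENCY_UPDATES` flag (new lines 37-39) treats any value other than an empty string, `"0"` or `"false"` (case-insensitive) as "skip enabled":

```python
# Sketch of the SKIP_DEPENDENCY_UPDATES parsing added at the top of the script.
# skip_enabled() and the sample values are illustrative only.
def skip_enabled(value):
    return value.lower() not in ("", "0", "false")

for value in ("", "0", "false", "FALSE", "1", "true", "anything"):
    print("%r -> %r" % (value, skip_enabled(value)))
# Only "", "0" and "false"/"FALSE" leave updates enabled; every other value skips them.
```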
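Second, the new `Git.postprocess_url()` (new lines 127-131) rewrites scp-style SSH locations such as `user@host:path`, which carry no URL scheme, into proper `ssh://` URLs, while everything else passes through unchanged. A minimal sketch of that rewrite:

```python
# Sketch of the scp-style SSH rewrite in Git.postprocess_url(); the function name
# and the sample URLs are illustrative only.
try:
    from urlparse import urlsplit        # Python 2, as used in the patch
except ImportError:
    from urllib.parse import urlsplit    # Python 3 fallback

def postprocess_git_url(url):
    # scp-like syntax (user@host:path) has no scheme; turn it into ssh://user@host/path
    if "@" in url and ":" in url and not urlsplit(url).scheme:
        return "ssh://" + url.replace(":", "/", 1)
    return url

print(postprocess_git_url("git@github.com:adblockplus/buildtools"))
# -> ssh://git@github.com/adblockplus/buildtools
print(postprocess_git_url("https://github.com/adblockplus/buildtools"))
# -> unchanged
```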
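Finally, `ensure_repo()` now resolves the clone URL differently depending on the root (new lines 221-228): a root that exists as a local path is joined with `os.path.join()`, while remote roots still go through `urlparse.urljoin()`. A simplified sketch of that resolution (it skips the `postprocess_url()` step, and the root values are hypothetical):

```python
# Sketch of the local-vs-remote root handling in ensure_repo(); sample inputs are mine.
import os
try:
    from urlparse import urljoin      # Python 2, as in the patch
except ImportError:
    from urllib.parse import urljoin  # Python 3 fallback

def resolve_source(root, sourcename):
    # Local checkouts are addressed as filesystem paths, remote roots as URLs.
    if os.path.exists(root):
        return os.path.join(root, sourcename)
    return urljoin(root, sourcename)

print(resolve_source("https://hg.adblockplus.org/", "buildtools"))
# -> https://hg.adblockplus.org/buildtools (remote root, URL join)
print(resolve_source("/tmp", "buildtools"))
# -> /tmp/buildtools where /tmp exists locally, otherwise a urljoin fallback
```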