| OLD | NEW | 
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python | 
| 2 # coding: utf-8 |  | 
| 3 | 2 | 
| 4 # This Source Code Form is subject to the terms of the Mozilla Public | 3 # This Source Code Form is subject to the terms of the Mozilla Public | 
| 5 # License, v. 2.0. If a copy of the MPL was not distributed with this | 4 # License, v. 2.0. If a copy of the MPL was not distributed with this | 
| 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 
| 7 | 6 | 
| 8 import sys | 7 import sys | 
| 9 import os | 8 import os | 
| 10 import posixpath | 9 import posixpath | 
| 11 import re | 10 import re | 
| 12 import io | 11 import io | 
| 13 import errno | 12 import errno | 
| 14 import logging | 13 import logging | 
| 15 import subprocess | 14 import subprocess | 
| 16 import urlparse | 15 import urlparse | 
| 17 import argparse | 16 import argparse | 
| 18 | 17 | 
| 19 from collections import OrderedDict | 18 from collections import OrderedDict | 
| 20 from ConfigParser import RawConfigParser | 19 from ConfigParser import RawConfigParser | 
| 21 | 20 | 
| 22 USAGE = """ | 21 USAGE = ''' | 
| 23 A dependencies file should look like this: | 22 A dependencies file should look like this: | 
| 24 | 23 | 
| 25   # VCS-specific root URLs for the repositories | 24   # VCS-specific root URLs for the repositories | 
| 26   _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ | 25   _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ | 
| 27   # File to update this script from (optional) | 26   # File to update this script from (optional) | 
| 28   _self = buildtools/ensure_dependencies.py | 27   _self = buildtools/ensure_dependencies.py | 
| 29   # Clone elemhidehelper repository into extensions/elemhidehelper directory at | 28   # Clone elemhidehelper repository into extensions/elemhidehelper directory at | 
| 30   # tag "1.2". | 29   # tag "1.2". | 
| 31   extensions/elemhidehelper = elemhidehelper 1.2 | 30   extensions/elemhidehelper = elemhidehelper 1.2 | 
| 32   # Clone buildtools repository into buildtools directory at VCS-specific | 31   # Clone buildtools repository into buildtools directory at VCS-specific | 
| 33   # revision IDs. | 32   # revision IDs. | 
| 34   buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 | 33   buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 | 
| 35   # Clone the adblockplus repository into adblockplus directory, overwriting the | 34   # Clone the adblockplus repository into adblockplus directory, overwriting the | 
| 36   # usual source URL for Git repository and specifying VCS specific revision IDs. | 35   # usual source URL for Git repository and specifying VCS specific revision IDs. | 
| 37   adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b | 36   adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b | 
| 38   # Clone the adblockpluschrome repository into the adblockpluschrome directory, | 37   # Clone the adblockpluschrome repository into the adblockpluschrome directory, | 
| 39   # from a specific Git repository, specifying the revision ID. | 38   # from a specific Git repository, specifying the revision ID. | 
| 40   adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7 | 39   adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7 | 
| 41 """ | 40 ''' | 
| 42 | 41 | 
| 43 SKIP_DEPENDENCY_UPDATES = os.environ.get( | 42 SKIP_DEPENDENCY_UPDATES = os.environ.get( | 
| 44     "SKIP_DEPENDENCY_UPDATES", "" | 43     'SKIP_DEPENDENCY_UPDATES', '' | 
| 45 ).lower() not in ("", "0", "false") | 44 ).lower() not in ('', '0', 'false') | 
| 46 | 45 | 
| 47 | 46 | 
| 48 class Mercurial(): | 47 class Mercurial(): | 
| 49     def istype(self, repodir): | 48     def istype(self, repodir): | 
| 50         return os.path.exists(os.path.join(repodir, ".hg")) | 49         return os.path.exists(os.path.join(repodir, '.hg')) | 
| 51 | 50 | 
| 52     def clone(self, source, target): | 51     def clone(self, source, target, revision): | 
| 53         if not source.endswith("/"): | 52         if not source.endswith('/'): | 
| 54             source += "/" | 53             source += '/' | 
| 55         subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, t
     arget]) | 54         if not os.path.isdir(target): | 
|  | 55             os.makedirs(target) | 
|  | 56         subprocess.check_call(['hg', 'init', target]) | 
|  | 57         config_path = os.path.join(target, '.hg', 'hgrc') | 
|  | 58         config = RawConfigParser() | 
|  | 59         config.add_section('paths') | 
|  | 60         config.set('paths', 'default', source) | 
|  | 61         with open(config_path, 'w') as stream: | 
|  | 62             config.write(stream) | 
|  | 63         self.pull(target) | 
| 56 | 64 | 
| 57     def get_revision_id(self, repo, rev=None): | 65     def get_revision_id(self, repo, rev=None): | 
| 58         command = ["hg", "id", "--repository", repo, "--id"] | 66         command = ['hg', 'id', '--repository', repo, '--id'] | 
| 59         if rev: | 67         if rev: | 
| 60             command.extend(["--rev", rev]) | 68             command.extend(['--rev', rev]) | 
| 61 | 69 | 
| 62         # Ignore stderr output and return code here: if revision lookup failed we | 70         # Ignore stderr output and return code here: if revision lookup failed we | 
| 63         # should simply return an empty string. | 71         # should simply return an empty string. | 
| 64         result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] | 72         result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] | 
| 65         return result.strip() | 73         return result.strip() | 
| 66 | 74 | 
| 67     def pull(self, repo): | 75     def pull(self, repo): | 
| 68         subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"]) | 76         subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet']) | 
| 69 | 77 | 
| 70     def update(self, repo, rev, revname): | 78     def update(self, repo, rev, revname): | 
| 71         subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", 
     "--check", "--rev", rev]) | 79         subprocess.check_call(['hg', 'update', '--repository', repo, '--quiet', 
     '--check', '--rev', rev]) | 
| 72 | 80 | 
| 73     def ignore(self, target, repo): | 81     def ignore(self, target, repo): | 
|  | 82         config_path = os.path.join(repo, '.hg', 'hgrc') | 
|  | 83         ignore_file = os.path.join('.hg', 'dependencies') | 
|  | 84         ignore_path = os.path.join(repo, ignore_file) | 
| 74 | 85 | 
| 75         if not self.istype(target): | 86         config = RawConfigParser() | 
|  | 87         config.read(config_path) | 
| 76 | 88 | 
| 77             config_path = os.path.join(repo, ".hg", "hgrc") | 89         if not config.has_section('ui'): | 
| 78             ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies")) | 90             config.add_section('ui') | 
| 79 | 91 | 
| 80             config = RawConfigParser() | 92         config.set('ui', 'ignore.dependencies', ignore_file) | 
| 81             config.read(config_path) | 93         with open(config_path, 'w') as stream: | 
|  | 94             config.write(stream) | 
| 82 | 95 | 
| 83             if not config.has_section("ui"): | 96         module = os.path.relpath(target, repo) | 
| 84                 config.add_section("ui") | 97         _ensure_line_exists(ignore_path, module) | 
| 85 |  | 
| 86             config.set("ui", "ignore.dependencies", ignore_path) |  | 
| 87             with open(config_path, "w") as stream: |  | 
| 88                 config.write(stream) |  | 
| 89 |  | 
| 90             module = os.path.relpath(target, repo) |  | 
| 91             _ensure_line_exists(ignore_path, module) |  | 
| 92 | 98 | 
| 93     def postprocess_url(self, url): | 99     def postprocess_url(self, url): | 
| 94         return url | 100         return url | 
| 95 | 101 | 
| 96 | 102 | 
| 97 class Git(): | 103 class Git(): | 
| 98     def istype(self, repodir): | 104     def istype(self, repodir): | 
| 99         return os.path.exists(os.path.join(repodir, ".git")) | 105         return os.path.exists(os.path.join(repodir, '.git')) | 
| 100 | 106 | 
| 101     def clone(self, source, target): | 107     def clone(self, source, target, revision): | 
| 102         source = source.rstrip("/") | 108         source = source.rstrip('/') | 
| 103         if not source.endswith(".git"): | 109         if not source.endswith('.git'): | 
| 104             source += ".git" | 110             source += '.git' | 
| 105         subprocess.check_call(["git", "clone", "--quiet", source, target]) | 111         if not os.path.isdir(target): | 
|  | 112             os.makedirs(target) | 
|  | 113         subprocess.check_call(['git', 'init', '--quiet'], cwd=target) | 
|  | 114         subprocess.check_call(['git', 'remote', 'add', 'origin', source], cwd=target) | 
|  | 115         self.pull(target) | 
|  | 116         # Manual call of update here because if the revision is the HEAD of the | 
|  | 117         # repository then update_repo, which is called after this clone method, | 
|  | 118         # cannot understand that checking out is still required and skips it. | 
|  | 119         # As the result there are no files. | 
|  | 120         resolved_revision = self.get_revision_id(target, revision) | 
|  | 121         self.update(target, resolved_revision, revision) | 
| 106 | 122 | 
| 107     def get_revision_id(self, repo, rev="HEAD"): | 123     def get_revision_id(self, repo, rev='HEAD'): | 
| 108         command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] | 124         command = ['git', 'rev-parse', '--revs-only', rev + '^{commit}'] | 
| 109         return subprocess.check_output(command, cwd=repo).strip() | 125         return subprocess.check_output(command, cwd=repo).strip() | 
| 110 | 126 | 
| 111     def pull(self, repo): | 127     def pull(self, repo): | 
| 112         # Fetch tracked branches, new tags and the list of available remote branches | 128         # Fetch tracked branches, new tags and the list of available remote branches | 
| 113         subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo) | 129         subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'], cwd=repo) | 
| 114         # Next we need to ensure all remote branches are tracked | 130         # Next we need to ensure all remote branches are tracked | 
| 115         newly_tracked = False | 131         newly_tracked = False | 
| 116         remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo) | 132         remotes = subprocess.check_output(['git', 'branch', '--remotes'], cwd=repo) | 
| 117         for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M): | 133         for match in re.finditer(r'^\s*(origin/(\S+))$', remotes, re.M): | 
| 118             remote, local = match.groups() | 134             remote, local = match.groups() | 
| 119             with open(os.devnull, "wb") as devnull: | 135             with open(os.devnull, 'wb') as devnull: | 
| 120                 if subprocess.call(["git", "branch", "--track", local, remote], | 136                 if subprocess.call(['git', 'branch', '--track', local, remote], | 
| 121                                    cwd=repo, stdout=devnull, stderr=devnull) == 0: | 137                                    cwd=repo, stdout=devnull, stderr=devnull) == 0: | 
| 122                     newly_tracked = True | 138                     newly_tracked = True | 
| 123         # Finally fetch any newly tracked remote branches | 139         # Finally fetch any newly tracked remote branches | 
| 124         if newly_tracked: | 140         if newly_tracked: | 
| 125             subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo) | 141             subprocess.check_call(['git', 'fetch', '--quiet', 'origin'], cwd=repo) | 
| 126 | 142 | 
| 127     def update(self, repo, rev, revname): | 143     def update(self, repo, rev, revname): | 
| 128         subprocess.check_call(["git", "checkout", "--quiet", revname], cwd=repo) | 144         subprocess.check_call(['git', 'checkout', '--quiet', revname], cwd=repo) | 
| 129 | 145 | 
| 130     def ignore(self, target, repo): | 146     def ignore(self, target, repo): | 
| 131         module = os.path.sep + os.path.relpath(target, repo) | 147         module = os.path.sep + os.path.relpath(target, repo) | 
| 132         exclude_file = os.path.join(repo, ".git", "info", "exclude") | 148         exclude_file = os.path.join(repo, '.git', 'info', 'exclude') | 
| 133         _ensure_line_exists(exclude_file, module) | 149         _ensure_line_exists(exclude_file, module) | 
| 134 | 150 | 
| 135     def postprocess_url(self, url): | 151     def postprocess_url(self, url): | 
| 136         # Handle alternative syntax of SSH URLS | 152         # Handle alternative syntax of SSH URLS | 
| 137         if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme: | 153         if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme: | 
| 138             return "ssh://" + url.replace(":", "/", 1) | 154             return 'ssh://' + url.replace(':', '/', 1) | 
| 139         return url | 155         return url | 
| 140 | 156 | 
| 141 repo_types = OrderedDict(( | 157 repo_types = OrderedDict(( | 
| 142     ("hg", Mercurial()), | 158     ('hg', Mercurial()), | 
| 143     ("git", Git()), | 159     ('git', Git()), | 
| 144 )) | 160 )) | 
| 145 | 161 | 
| 146 # [vcs:]value | 162 # [vcs:]value | 
| 147 item_regexp = re.compile( | 163 item_regexp = re.compile( | 
| 148     "^(?:(" + "|".join(map(re.escape, repo_types.keys())) + "):)?" | 164     '^(?:(' + '|'.join(map(re.escape, repo_types.keys())) + '):)?' | 
| 149     "(.+)$" | 165     '(.+)$' | 
| 150 ) | 166 ) | 
| 151 | 167 | 
| 152 # [url@]rev | 168 # [url@]rev | 
| 153 source_regexp = re.compile( | 169 source_regexp = re.compile( | 
| 154     "^(?:(.*)@)?" | 170     '^(?:(.*)@)?' | 
| 155     "(.+)$" | 171     '(.+)$' | 
| 156 ) | 172 ) | 
| 157 | 173 | 
| 158 | 174 | 
| 159 def merge_seqs(seq1, seq2): | 175 def merge_seqs(seq1, seq2): | 
| 160     """Return a list of any truthy values from the suplied sequences | 176     """Return a list of any truthy values from the suplied sequences | 
| 161 | 177 | 
| 162     (None, 2), (1,)      => [1, 2] | 178     (None, 2), (1,)      => [1, 2] | 
| 163     None, (1, 2)         => [1, 2] | 179     None, (1, 2)         => [1, 2] | 
| 164     (1, 2), (3, 4)       => [3, 4] | 180     (1, 2), (3, 4)       => [3, 4] | 
| 165     """ | 181     """ | 
| 166     return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ()) | 182     return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ()) | 
| 167 | 183 | 
| 168 | 184 | 
| 169 def parse_spec(path, line): | 185 def parse_spec(path, line): | 
| 170     if "=" not in line: | 186     if '=' not in line: | 
| 171         logging.warning("Invalid line in file %s: %s" % (path, line)) | 187         logging.warning('Invalid line in file %s: %s' % (path, line)) | 
| 172         return None, None | 188         return None, None | 
| 173 | 189 | 
| 174     key, value = line.split("=", 1) | 190     key, value = line.split('=', 1) | 
| 175     key = key.strip() | 191     key = key.strip() | 
| 176     items = value.split() | 192     items = value.split() | 
| 177     if not len(items): | 193     if not len(items): | 
| 178         logging.warning("No value specified for key %s in file %s" % (key, path)) | 194         logging.warning('No value specified for key %s in file %s' % (key, path)) | 
| 179         return key, None | 195         return key, None | 
| 180 | 196 | 
| 181     result = OrderedDict() | 197     result = OrderedDict() | 
| 182     is_dependency_field = not key.startswith("_") | 198     is_dependency_field = not key.startswith('_') | 
| 183 | 199 | 
| 184     for i, item in enumerate(items): | 200     for i, item in enumerate(items): | 
| 185         try: | 201         try: | 
| 186             vcs, value = re.search(item_regexp, item).groups() | 202             vcs, value = re.search(item_regexp, item).groups() | 
| 187             vcs = vcs or "*" | 203             vcs = vcs or '*' | 
| 188             if is_dependency_field: | 204             if is_dependency_field: | 
| 189                 if i == 0 and vcs == "*": | 205                 if i == 0 and vcs == '*': | 
| 190                     # In order to be backwards compatible we have to assume that the first | 206                     # In order to be backwards compatible we have to assume that the first | 
| 191                     # source contains only a URL/path for the repo if it does not contain | 207                     # source contains only a URL/path for the repo if it does not contain | 
| 192                     # the VCS part | 208                     # the VCS part | 
| 193                     url_rev = (value, None) | 209                     url_rev = (value, None) | 
| 194                 else: | 210                 else: | 
| 195                     url_rev = re.search(source_regexp, value).groups() | 211                     url_rev = re.search(source_regexp, value).groups() | 
| 196                 result[vcs] = merge_seqs(result.get(vcs), url_rev) | 212                 result[vcs] = merge_seqs(result.get(vcs), url_rev) | 
| 197             else: | 213             else: | 
| 198                 if vcs in result: | 214                 if vcs in result: | 
| 199                     logging.warning("Ignoring duplicate value for type %r " | 215                     logging.warning('Ignoring duplicate value for type %r ' | 
| 200                                     "(key %r in file %r)" % (vcs, key, path)) | 216                                     '(key %r in file %r)' % (vcs, key, path)) | 
| 201                 result[vcs] = value | 217                 result[vcs] = value | 
| 202         except AttributeError: | 218         except AttributeError: | 
| 203             logging.warning("Ignoring invalid item %r for type %r " | 219             logging.warning('Ignoring invalid item %r for type %r ' | 
| 204                             "(key %r in file %r)" % (item, vcs, key, path)) | 220                             '(key %r in file %r)' % (item, vcs, key, path)) | 
| 205             continue | 221             continue | 
| 206     return key, result | 222     return key, result | 
| 207 | 223 | 
| 208 | 224 | 
| 209 def read_deps(repodir): | 225 def read_deps(repodir): | 
| 210     result = {} | 226     result = {} | 
| 211     deps_path = os.path.join(repodir, "dependencies") | 227     deps_path = os.path.join(repodir, 'dependencies') | 
| 212     try: | 228     try: | 
| 213         with io.open(deps_path, "rt", encoding="utf-8") as handle: | 229         with io.open(deps_path, 'rt', encoding='utf-8') as handle: | 
| 214             for line in handle: | 230             for line in handle: | 
| 215                 # Remove comments and whitespace | 231                 # Remove comments and whitespace | 
| 216                 line = re.sub(r"#.*", "", line).strip() | 232                 line = re.sub(r'#.*', '', line).strip() | 
| 217                 if not line: | 233                 if not line: | 
| 218                     continue | 234                     continue | 
| 219 | 235 | 
| 220                 key, spec = parse_spec(deps_path, line) | 236                 key, spec = parse_spec(deps_path, line) | 
| 221                 if spec: | 237                 if spec: | 
| 222                     result[key] = spec | 238                     result[key] = spec | 
| 223         return result | 239         return result | 
| 224     except IOError, e: | 240     except IOError as e: | 
| 225         if e.errno != errno.ENOENT: | 241         if e.errno != errno.ENOENT: | 
| 226             raise | 242             raise | 
| 227         return None | 243         return None | 
| 228 | 244 | 
| 229 | 245 | 
| 230 def safe_join(path, subpath): | 246 def safe_join(path, subpath): | 
| 231     # This has been inspired by Flask's safe_join() function | 247     # This has been inspired by Flask's safe_join() function | 
| 232     forbidden = {os.sep, os.altsep} - {posixpath.sep, None} | 248     forbidden = {os.sep, os.altsep} - {posixpath.sep, None} | 
| 233     if any(sep in subpath for sep in forbidden): | 249     if any(sep in subpath for sep in forbidden): | 
| 234         raise Exception("Illegal directory separator in dependency path %s" % subpath) | 250         raise Exception('Illegal directory separator in dependency path %s' % subpath) | 
| 235 | 251 | 
| 236     normpath = posixpath.normpath(subpath) | 252     normpath = posixpath.normpath(subpath) | 
| 237     if posixpath.isabs(normpath): | 253     if posixpath.isabs(normpath): | 
| 238         raise Exception("Dependency path %s cannot be absolute" % subpath) | 254         raise Exception('Dependency path %s cannot be absolute' % subpath) | 
| 239     if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): | 255     if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): | 
| 240         raise Exception("Dependency path %s has to be inside the repository" % subpath) | 256         raise Exception('Dependency path %s has to be inside the repository' % subpath) | 
| 241     return os.path.join(path, *normpath.split(posixpath.sep)) | 257     return os.path.join(path, *normpath.split(posixpath.sep)) | 
| 242 | 258 | 
| 243 | 259 | 
| 244 def get_repo_type(repo): | 260 def get_repo_type(repo): | 
| 245     for name, repotype in repo_types.iteritems(): | 261     for name, repotype in repo_types.iteritems(): | 
| 246         if repotype.istype(repo): | 262         if repotype.istype(repo): | 
| 247             return name | 263             return name | 
| 248     return "hg" | 264     return 'hg' | 
| 249 | 265 | 
| 250 | 266 | 
| 251 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename): | 267 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename, revision): | 
| 252     if os.path.exists(target): | 268     repo = repo_types[type] | 
|  | 269     if repo.istype(target): | 
| 253         return | 270         return | 
| 254 | 271 | 
| 255     if SKIP_DEPENDENCY_UPDATES: | 272     if SKIP_DEPENDENCY_UPDATES: | 
| 256         logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " | 273         logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, ' | 
| 257                         "%s not cloned", target) | 274                         '%s not cloned', target) | 
| 258         return | 275         return | 
| 259 | 276 | 
| 260     postprocess_url = repo_types[type].postprocess_url | 277     root = repo.postprocess_url(root) | 
| 261     root = postprocess_url(root) | 278     sourcename = repo.postprocess_url(sourcename) | 
| 262     sourcename = postprocess_url(sourcename) |  | 
| 263 | 279 | 
| 264     if os.path.exists(root): | 280     if os.path.exists(root): | 
| 265         url = os.path.join(root, sourcename) | 281         url = os.path.join(root, sourcename) | 
| 266     else: | 282     else: | 
| 267         url = urlparse.urljoin(root, sourcename) | 283         url = urlparse.urljoin(root, sourcename) | 
| 268 | 284 | 
| 269     logging.info("Cloning repository %s into %s" % (url, target)) | 285     logging.info('Cloning repository %s into %s' % (url, target)) | 
| 270     repo_types[type].clone(url, target) | 286     repo.clone(url, target, revision) | 
| 271     repo_types[parenttype].ignore(target, parentrepo) | 287     repo_types[parenttype].ignore(target, parentrepo) | 
| 272 | 288 | 
| 273 | 289 | 
| 274 def update_repo(target, type, revision): | 290 def update_repo(target, type, revision): | 
| 275     resolved_revision = repo_types[type].get_revision_id(target, revision) | 291     resolved_revision = repo_types[type].get_revision_id(target, revision) | 
| 276     current_revision = repo_types[type].get_revision_id(target) | 292     current_revision = repo_types[type].get_revision_id(target) | 
| 277 | 293 | 
| 278     if resolved_revision != current_revision: | 294     if resolved_revision != current_revision: | 
| 279         if SKIP_DEPENDENCY_UPDATES: | 295         if SKIP_DEPENDENCY_UPDATES: | 
| 280             logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " | 296             logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, ' | 
| 281                             "%s not checked out to %s", target, revision) | 297                             '%s not checked out to %s', target, revision) | 
| 282             return | 298             return | 
| 283 | 299 | 
| 284         if not resolved_revision: | 300         if not resolved_revision: | 
| 285             logging.info("Revision %s is unknown, downloading remote changes" % revision) | 301             logging.info('Revision %s is unknown, downloading remote changes' % revision) | 
| 286             repo_types[type].pull(target) | 302             repo_types[type].pull(target) | 
| 287             resolved_revision = repo_types[type].get_revision_id(target, revision) | 303             resolved_revision = repo_types[type].get_revision_id(target, revision) | 
| 288             if not resolved_revision: | 304             if not resolved_revision: | 
| 289                 raise Exception("Failed to resolve revision %s" % revision) | 305                 raise Exception('Failed to resolve revision %s' % revision) | 
| 290 | 306 | 
| 291         logging.info("Updating repository %s to revision %s" % (target, resolved_revision)) | 307         logging.info('Updating repository %s to revision %s' % (target, resolved_revision)) | 
| 292         repo_types[type].update(target, resolved_revision, revision) | 308         repo_types[type].update(target, resolved_revision, revision) | 
| 293 | 309 | 
| 294 | 310 | 
| 295 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): | 311 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): | 
| 296     config = read_deps(repodir) | 312     config = read_deps(repodir) | 
| 297     if config is None: | 313     if config is None: | 
| 298         if level == 0: | 314         if level == 0: | 
| 299             logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE)) | 315             logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE)) | 
| 300         return | 316         return | 
| 301     if level >= 10: | 317     if level >= 10: | 
| 302         logging.warning("Too much subrepository nesting, ignoring %s" % repo) | 318         logging.warning('Too much subrepository nesting, ignoring %s' % repo) | 
| 303         return | 319         return | 
| 304 | 320 | 
| 305     if overrideroots is not None: | 321     if overrideroots is not None: | 
| 306         config["_root"] = overrideroots | 322         config['_root'] = overrideroots | 
| 307 | 323 | 
| 308     for dir, sources in config.iteritems(): | 324     for dir, sources in config.iteritems(): | 
| 309         if (dir.startswith("_") or | 325         if (dir.startswith('_') or | 
| 310             skipdependencies.intersection([s[0] for s in sources if s[0]])): | 326             skipdependencies.intersection([s[0] for s in sources if s[0]])): | 
| 311             continue | 327             continue | 
| 312 | 328 | 
| 313         target = safe_join(repodir, dir) | 329         target = safe_join(repodir, dir) | 
| 314         parenttype = get_repo_type(repodir) | 330         parenttype = get_repo_type(repodir) | 
| 315         _root = config.get("_root", {}) | 331         _root = config.get('_root', {}) | 
| 316 | 332 | 
| 317         for key in sources.keys() + _root.keys(): | 333         for key in sources.keys() + _root.keys(): | 
| 318             if key == parenttype or key is None and vcs != "*": | 334             if key == parenttype or key is None and vcs != '*': | 
| 319                 vcs = key | 335                 vcs = key | 
| 320         source, rev = merge_seqs(sources.get("*"), sources.get(vcs)) | 336         source, rev = merge_seqs(sources.get('*'), sources.get(vcs)) | 
| 321 | 337 | 
| 322         if not (vcs and source and rev): | 338         if not (vcs and source and rev): | 
| 323             logging.warning("No valid source / revision found to create %s" % target) | 339             logging.warning('No valid source / revision found to create %s' % target) | 
| 324             continue | 340             continue | 
| 325 | 341 | 
| 326         ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source) | 342         ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source, rev) | 
| 327         update_repo(target, vcs, rev) | 343         update_repo(target, vcs, rev) | 
| 328         resolve_deps(target, level + 1, self_update=False, | 344         resolve_deps(target, level + 1, self_update=False, | 
| 329                      overrideroots=overrideroots, skipdependencies=skipdependencies) | 345                      overrideroots=overrideroots, skipdependencies=skipdependencies) | 
| 330 | 346 | 
| 331     if self_update and "_self" in config and "*" in config["_self"]: | 347     if self_update and '_self' in config and '*' in config['_self']: | 
| 332         source = safe_join(repodir, config["_self"]["*"]) | 348         source = safe_join(repodir, config['_self']['*']) | 
| 333         try: | 349         try: | 
| 334             with io.open(source, "rb") as handle: | 350             with io.open(source, 'rb') as handle: | 
| 335                 sourcedata = handle.read() | 351                 sourcedata = handle.read() | 
| 336         except IOError, e: | 352         except IOError as e: | 
| 337             if e.errno != errno.ENOENT: | 353             if e.errno != errno.ENOENT: | 
| 338                 raise | 354                 raise | 
| 339             logging.warning("File %s doesn't exist, skipping self-update" % source) | 355             logging.warning("File %s doesn't exist, skipping self-update" % source) | 
| 340             return | 356             return | 
| 341 | 357 | 
| 342         target = __file__ | 358         target = __file__ | 
| 343         with io.open(target, "rb") as handle: | 359         with io.open(target, 'rb') as handle: | 
| 344             targetdata = handle.read() | 360             targetdata = handle.read() | 
| 345 | 361 | 
| 346         if sourcedata != targetdata: | 362         if sourcedata != targetdata: | 
| 347             logging.info("Updating %s from %s, don't forget to commit" % (target, source)) | 363             logging.info("Updating %s from %s, don't forget to commit" % (target, source)) | 
| 348             with io.open(target, "wb") as handle: | 364             with io.open(target, 'wb') as handle: | 
| 349                 handle.write(sourcedata) | 365                 handle.write(sourcedata) | 
| 350             if __name__ == "__main__": | 366             if __name__ == '__main__': | 
| 351                 logging.info("Restarting %s" % target) | 367                 logging.info('Restarting %s' % target) | 
| 352                 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:]) | 368                 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:]) | 
| 353             else: | 369             else: | 
| 354                 logging.warning("Cannot restart %s automatically, please rerun" % target) | 370                 logging.warning('Cannot restart %s automatically, please rerun' % target) | 
| 355 | 371 | 
| 356 | 372 | 
| 357 def _ensure_line_exists(path, pattern): | 373 def _ensure_line_exists(path, pattern): | 
| 358     with open(path, 'a+') as f: | 374     with open(path, 'a+') as f: | 
|  | 375         f.seek(0, os.SEEK_SET) | 
| 359         file_content = [l.strip() for l in f.readlines()] | 376         file_content = [l.strip() for l in f.readlines()] | 
| 360         if not pattern in file_content: | 377         if not pattern in file_content: | 
| 361             file_content.append(pattern) | 378             file_content.append(pattern) | 
| 362             f.seek(0, os.SEEK_SET) | 379             f.seek(0, os.SEEK_SET) | 
| 363             f.truncate() | 380             f.truncate() | 
| 364             for l in file_content: | 381             for l in file_content: | 
| 365                 print >>f, l | 382                 print >>f, l | 
| 366 | 383 | 
| 367 if __name__ == "__main__": | 384 if __name__ == '__main__': | 
| 368     logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) | 385     logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) | 
| 369 | 386 | 
| 370     parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.") | 387     parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.') | 
| 371     parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path") | 388     parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path') | 
| 372     parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output") | 389     parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output') | 
| 373     args = parser.parse_args() | 390     args = parser.parse_args() | 
| 374 | 391 | 
| 375     if args.quiet: | 392     if args.quiet: | 
| 376         logging.disable(logging.INFO) | 393         logging.disable(logging.INFO) | 
| 377 | 394 | 
| 378     repos = args.repos | 395     repos = args.repos | 
| 379     if not len(repos): | 396     if not len(repos): | 
| 380         repos = [os.path.dirname(__file__)] | 397         repos = [os.path.dirname(__file__)] | 
| 381     for repo in repos: | 398     for repo in repos: | 
| 382         resolve_deps(repo) | 399         resolve_deps(repo) | 
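
For context, the `parse_spec()`/`merge_seqs()` pair in the NEW column is what turns a line of the dependencies file into the source and revision that `resolve_deps()` hands to `ensure_repo()` and `update_repo()`. Below is a minimal sketch (not part of the change itself), assuming the NEW version of this script is importable as `ensure_dependencies` on `sys.path`; the module name and the example invocation are illustrative only, the dependency line is taken from the USAGE text.

```python
# Sketch only (Python 2). Assumes the NEW ensure_dependencies.py is importable
# as "ensure_dependencies"; that name is an assumption for illustration.
import ensure_dependencies as ed

# Parse one dependency line from the USAGE example.
key, spec = ed.parse_spec('dependencies',
                          'buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5')
# key == 'buildtools'
# spec['*']  -> ['buildtools', None]    (bare first item: repository name only)
# spec['hg'] -> [None, '016d16f7137b']  (VCS-specific revision, no URL override)

# merge_seqs() overlays the VCS-specific pair on the generic one, producing the
# (source, revision) pair that resolve_deps() passes on.
source, rev = ed.merge_seqs(spec.get('*'), spec.get('hg'))
assert (source, rev) == ('buildtools', '016d16f7137b')
```

With the NEW `clone(source, target, revision)` signature, that revision is also forwarded to `clone()`, which is why the Git path can check out the requested revision immediately after the initial fetch instead of relying on a later `update_repo()` call.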