#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys
import os
import posixpath
import re
import io
import errno
import logging
import subprocess
import urlparse
import argparse

from collections import OrderedDict
from ConfigParser import RawConfigParser

USAGE = '''
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
'''

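# Setting the SKIP_DEPENDENCY_UPDATES environment variable to anything other
# than '', '0' or 'false' prevents this script from cloning or updating any
# dependencies.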
SKIP_DEPENDENCY_UPDATES = os.environ.get(
    'SKIP_DEPENDENCY_UPDATES', ''
).lower() not in ('', '0', 'false')


class Mercurial():
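    """Repository access layer for Mercurial working copies.

    Shells out to the hg command line client and registers dependency
    directories in .hg/dependencies so Mercurial ignores them.
    """
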
    def istype(self, repodir):
        return os.path.exists(os.path.join(repodir, '.hg'))

    def clone(self, source, target):
        if not source.endswith('/'):
            source += '/'
        subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, target])

    def get_revision_id(self, repo, rev=None):
        command = ['hg', 'id', '--repository', repo, '--id']
        if rev:
            command.extend(['--rev', rev])

        # Ignore stderr output and return code here: if revision lookup failed we
        # should simply return an empty string.
        result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
        return result.strip()

    def pull(self, repo):
        subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])

    def update(self, repo, rev, revname):
        subprocess.check_call(['hg', 'update', '--repository', repo, '--quiet', '--check', '--rev', rev])

    def ignore(self, target, repo):
        config_path = os.path.join(repo, '.hg', 'hgrc')
        ignore_file = os.path.join('.hg', 'dependencies')
        ignore_path = os.path.join(repo, ignore_file)

        config = RawConfigParser()
        config.read(config_path)

        if not config.has_section('ui'):
            config.add_section('ui')

        config.set('ui', 'ignore.dependencies', ignore_file)
        with open(config_path, 'w') as stream:
            config.write(stream)

        module = os.path.relpath(target, repo)
        _ensure_line_exists(ignore_path, module)

    def postprocess_url(self, url):
        return url


class Git():
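    """Repository access layer for Git working copies.

    Shells out to the git command line client and records dependency
    directories in .git/info/exclude so they do not show up as untracked
    files.
    """
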
    def istype(self, repodir):
        return os.path.exists(os.path.join(repodir, '.git'))

    def clone(self, source, target):
        source = source.rstrip('/')
        if not source.endswith('.git'):
            source += '.git'
        subprocess.check_call(['git', 'clone', '--quiet', source, target])

    def get_revision_id(self, repo, rev='HEAD'):
        command = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
        return subprocess.check_output(command, cwd=repo).strip()

    def pull(self, repo):
        # Fetch tracked branches, new tags and the list of available remote branches
        subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'], cwd=repo)
        # Next we need to ensure all remote branches are tracked
        newly_tracked = False
        remotes = subprocess.check_output(['git', 'branch', '--remotes'], cwd=repo)
        for match in re.finditer(r'^\s*(origin/(\S+))$', remotes, re.M):
            remote, local = match.groups()
            with open(os.devnull, 'wb') as devnull:
                if subprocess.call(['git', 'branch', '--track', local, remote],
                                   cwd=repo, stdout=devnull, stderr=devnull) == 0:
                    newly_tracked = True
        # Finally fetch any newly tracked remote branches
        if newly_tracked:
            subprocess.check_call(['git', 'fetch', '--quiet', 'origin'], cwd=repo)

    def update(self, repo, rev, revname):
        subprocess.check_call(['git', 'checkout', '--quiet', revname], cwd=repo)

    def ignore(self, target, repo):
        module = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
        _ensure_line_exists(exclude_file, module)

    def postprocess_url(self, url):
        # Handle alternative syntax of SSH URLs
        if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme:
            return 'ssh://' + url.replace(':', '/', 1)
        return url

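# Mapping of the VCS prefixes used in dependencies files to their handlers.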
repo_types = OrderedDict((
    ('hg', Mercurial()),
    ('git', Git()),
))

# [vcs:]value
item_regexp = re.compile(
    '^(?:(' + '|'.join(map(re.escape, repo_types.keys())) + '):)?'
    '(.+)$'
)

# [url@]rev
source_regexp = re.compile(
    '^(?:(.*)@)?'
    '(.+)$'
)


def merge_seqs(seq1, seq2):
|  | 157     """Return a list of any truthy values from the suplied sequences | 
|  | 158 | 
|  | 159     (None, 2), (1,)      => [1, 2] | 
|  | 160     None, (1, 2)         => [1, 2] | 
|  | 161     (1, 2), (3, 4)       => [3, 4] | 
|  | 162     """ | 
|  | 163     return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ()) | 
|  | 164 | 
|  | 165 | 
|  | 166 def parse_spec(path, line): | 
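    """Parse a single "key = value" line of a dependencies file.

    Returns a (key, spec) tuple where spec maps a VCS name (or '*') to
    either a (url, revision) pair for dependency entries or a plain string
    for metadata entries such as _root and _self.
    """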
    if '=' not in line:
        logging.warning('Invalid line in file %s: %s' % (path, line))
        return None, None

    key, value = line.split('=', 1)
    key = key.strip()
    items = value.split()
    if not len(items):
        logging.warning('No value specified for key %s in file %s' % (key, path))
        return key, None

    result = OrderedDict()
    is_dependency_field = not key.startswith('_')

    for i, item in enumerate(items):
        try:
            vcs, value = re.search(item_regexp, item).groups()
            vcs = vcs or '*'
            if is_dependency_field:
                if i == 0 and vcs == '*':
                    # In order to be backwards compatible we have to assume that the first
                    # source contains only a URL/path for the repo if it does not contain
                    # the VCS part
                    url_rev = (value, None)
                else:
                    url_rev = re.search(source_regexp, value).groups()
                result[vcs] = merge_seqs(result.get(vcs), url_rev)
            else:
                if vcs in result:
                    logging.warning('Ignoring duplicate value for type %r '
                                    '(key %r in file %r)' % (vcs, key, path))
                result[vcs] = value
        except AttributeError:
            logging.warning('Ignoring invalid item %r for type %r '
                            '(key %r in file %r)' % (item, vcs, key, path))
            continue
    return key, result


def read_deps(repodir):
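    """Read the "dependencies" file of a repository.

    Returns a dict mapping each key to its parsed spec, or None if the
    repository has no dependencies file.
    """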
    result = {}
    deps_path = os.path.join(repodir, 'dependencies')
    try:
        with io.open(deps_path, 'rt', encoding='utf-8') as handle:
            for line in handle:
                # Remove comments and whitespace
                line = re.sub(r'#.*', '', line).strip()
                if not line:
                    continue

                key, spec = parse_spec(deps_path, line)
                if spec:
                    result[key] = spec
        return result
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return None


def safe_join(path, subpath):
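    """Join a dependency subpath onto a base path.

    Rejects absolute paths, parent directory references and native
    directory separators so a dependency can never escape the repository.
    """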
    # This has been inspired by Flask's safe_join() function
    forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
    if any(sep in subpath for sep in forbidden):
        raise Exception('Illegal directory separator in dependency path %s' % subpath)

    normpath = posixpath.normpath(subpath)
    if posixpath.isabs(normpath):
        raise Exception('Dependency path %s cannot be absolute' % subpath)
    if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
        raise Exception('Dependency path %s has to be inside the repository' % subpath)
    return os.path.join(path, *normpath.split(posixpath.sep))


def get_repo_type(repo):
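    """Return the name of the VCS managing repo, falling back to 'hg'."""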
    for name, repotype in repo_types.iteritems():
        if repotype.istype(repo):
            return name
    return 'hg'


def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
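    """Clone the dependency into target if it does not exist yet.

    The clone URL is built from root and sourcename, and the new directory
    is added to the parent repository's ignore list.
    """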
    if os.path.exists(target):
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return

    postprocess_url = repo_types[type].postprocess_url
    root = postprocess_url(root)
    sourcename = postprocess_url(sourcename)

    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    repo_types[type].clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)


def update_repo(target, type, revision):
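    """Check out the requested revision in target.

    Pulls from the remote first if the revision cannot be resolved locally;
    does nothing if the working copy is already at that revision.
    """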
    resolved_revision = repo_types[type].get_revision_id(target, revision)
    current_revision = repo_types[type].get_revision_id(target)

    if resolved_revision != current_revision:
        if SKIP_DEPENDENCY_UPDATES:
            logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                            '%s not checked out to %s', target, revision)
            return

        if not resolved_revision:
            logging.info('Revision %s is unknown, downloading remote changes' % revision)
            repo_types[type].pull(target)
            resolved_revision = repo_types[type].get_revision_id(target, revision)
            if not resolved_revision:
                raise Exception('Failed to resolve revision %s' % revision)

        logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
        repo_types[type].update(target, resolved_revision, revision)


def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
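    """Ensure that all dependencies of repodir are present and up to date.

    Reads the repository's dependencies file, clones or updates each listed
    dependency (recursing into them, up to ten levels deep) and finally
    replaces this script with the copy referenced by _self if it differs.
    """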
    config = read_deps(repodir)
    if config is None:
        if level == 0:
            logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
        return
    if level >= 10:
        logging.warning('Too much subrepository nesting, ignoring %s' % repodir)
        return

    if overrideroots is not None:
        config['_root'] = overrideroots

    for dir, sources in config.iteritems():
        if (dir.startswith('_') or
            skipdependencies.intersection([s[0] for s in sources if s[0]])):
            continue

        target = safe_join(repodir, dir)
        parenttype = get_repo_type(repodir)
        _root = config.get('_root', {})

        # Pick the VCS type matching the parent repository; default to None so
        # the check below reports a missing source instead of raising NameError.
        vcs = None
        for key in sources.keys() + _root.keys():
            if key == parenttype or key is None and vcs != '*':
                vcs = key
        source, rev = merge_seqs(sources.get('*'), sources.get(vcs))

        if not (vcs and source and rev):
            logging.warning('No valid source / revision found to create %s' % target)
            continue

        ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source)
        update_repo(target, vcs, rev)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and '_self' in config and '*' in config['_self']:
        source = safe_join(repodir, config['_self']['*'])
        try:
            with io.open(source, 'rb') as handle:
                sourcedata = handle.read()
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, 'rb') as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, 'wb') as handle:
                handle.write(sourcedata)
            if __name__ == '__main__':
                logging.info('Restarting %s' % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning('Cannot restart %s automatically, please rerun' % target)


def _ensure_line_exists(path, pattern):
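    """Append pattern as a line to the file at path unless it already contains it."""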
    with open(path, 'a+') as f:
        f.seek(0, os.SEEK_SET)
        file_content = [l.strip() for l in f.readlines()]
        if pattern not in file_content:
            file_content.append(pattern)
            f.seek(0, os.SEEK_SET)
            f.truncate()
            for l in file_content:
                print >>f, l

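# Example invocations (by default the script checks the repository containing
# this script):
#   ./ensure_dependencies.py
#   ./ensure_dependencies.py --quiet path/to/repo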
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    repos = args.repos
    if not len(repos):
        repos = [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)