| OLD | NEW | 
|    1 #!/usr/bin/env python |    1 #!/usr/bin/env python | 
|    2  |    2  | 
|    3 # This Source Code Form is subject to the terms of the Mozilla Public |    3 # This Source Code Form is subject to the terms of the Mozilla Public | 
|    4 # License, v. 2.0. If a copy of the MPL was not distributed with this |    4 # License, v. 2.0. If a copy of the MPL was not distributed with this | 
|    5 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |    5 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 
|    6  |    6  | 
|    7 import sys |    7 import sys | 
|    8 import os |    8 import os | 
|    9 import posixpath |    9 import posixpath | 
|   10 import re |   10 import re | 
|   11 import io |   11 import io | 
|   12 import errno |   12 import errno | 
|   13 import logging |   13 import logging | 
|   14 import subprocess |   14 import subprocess | 
|   15 import urlparse |   15 import urlparse | 
|   16 import argparse |   16 import argparse | 
 |   17 import json | 
|   17  |   18  | 
|   18 from collections import OrderedDict |   19 from collections import OrderedDict | 
|   19 from ConfigParser import RawConfigParser |   20 from ConfigParser import RawConfigParser | 
|   20  |   21  | 
|   21 USAGE = ''' |   22 USAGE = ''' | 
|   22 A dependencies file should look like this: |   23 A dependencies file should look like this: | 
|   23  |   24  | 
|   24   # VCS-specific root URLs for the repositories |   25   # VCS-specific root URLs for the repositories | 
|   25   _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ |   26   _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ | 
|   26   # File to update this script from (optional) |   27   # File to update this script from (optional) | 
|   27   _self = buildtools/ensure_dependencies.py |   28   _self = buildtools/ensure_dependencies.py | 
|   28   # Clone elemhidehelper repository into extensions/elemhidehelper directory at |   29   # Clone elemhidehelper repository into extensions/elemhidehelper directory at | 
|   29   # tag "1.2". |   30   # tag "1.2". | 
|   30   extensions/elemhidehelper = elemhidehelper 1.2 |   31   extensions/elemhidehelper = elemhidehelper 1.2 | 
|   31   # Clone buildtools repository into buildtools directory at VCS-specific |   32   # Clone buildtools repository into buildtools directory at VCS-specific | 
|   32   # revision IDs. |   33   # revision IDs. | 
|   33   buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 |   34   buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 | 
|   34   # Clone the adblockplus repository into adblockplus directory, overwriting the |   35   # Clone the adblockplus repository into adblockplus directory, overwriting the | 
|   35   # usual source URL for Git repository and specifying VCS specific revision IDs. |   36   # usual source URL for Git repository and specifying VCS specific revision IDs. | 
|   36   adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b |   37   adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b | 
|   37   # Clone the adblockpluschrome repository into the adblockpluschrome directory, |   38   # Clone the adblockpluschrome repository into the adblockpluschrome directory, | 
|   38   # from a specific Git repository, specifying the revision ID. |   39   # from a specific Git repository, specifying the revision ID. | 
|   39   adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7 |   40   adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7 | 
|   40 ''' |   41 ''' | 
|   41  |   42  | 
|   42 SKIP_DEPENDENCY_UPDATES = os.environ.get( |   43 SKIP_DEPENDENCY_UPDATES = os.environ.get( | 
|   43     'SKIP_DEPENDENCY_UPDATES', '' |   44     'SKIP_DEPENDENCY_UPDATES', '' | 
|   44 ).lower() not in ('', '0', 'false') |   45 ).lower() not in ('', '0', 'false') | 
|   45  |   46  | 
 |   47 NPM_LOCKFILE = '.npm_install_lock' | 
|   46  |   48  | 
|   47 class Mercurial(): |   49  | 
 |   50 class Mercurial: | 
|   48     def istype(self, repodir): |   51     def istype(self, repodir): | 
|   49         return os.path.exists(os.path.join(repodir, '.hg')) |   52         return os.path.exists(os.path.join(repodir, '.hg')) | 
|   50  |   53  | 
|   51     def clone(self, source, target): |   54     def clone(self, source, target): | 
|   52         if not source.endswith('/'): |   55         if not source.endswith('/'): | 
|   53             source += '/' |   56             source += '/' | 
|   54         subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, target]) |   57         subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, target]) | 
|   55  |   58  | 
|   56     def get_revision_id(self, repo, rev=None): |   59     def get_revision_id(self, repo, rev=None): | 
|   57         command = ['hg', 'id', '--repository', repo, '--id'] |   60         command = ['hg', 'id', '--repository', repo, '--id'] | 
| (...skipping 26 matching lines...) | 
|   84         with open(config_path, 'w') as stream: |   87         with open(config_path, 'w') as stream: | 
|   85             config.write(stream) |   88             config.write(stream) | 
|   86  |   89  | 
|   87         module = os.path.relpath(target, repo) |   90         module = os.path.relpath(target, repo) | 
|   88         _ensure_line_exists(ignore_path, module) |   91         _ensure_line_exists(ignore_path, module) | 
|   89  |   92  | 
|   90     def postprocess_url(self, url): |   93     def postprocess_url(self, url): | 
|   91         return url |   94         return url | 
|   92  |   95  | 
|   93  |   96  | 
|   94 class Git(): |   97 class Git: | 
|   95     def istype(self, repodir): |   98     def istype(self, repodir): | 
|   96         return os.path.exists(os.path.join(repodir, '.git')) |   99         return os.path.exists(os.path.join(repodir, '.git')) | 
|   97  |  100  | 
|   98     def clone(self, source, target): |  101     def clone(self, source, target): | 
|   99         source = source.rstrip('/') |  102         source = source.rstrip('/') | 
|  100         if not source.endswith('.git'): |  103         if not source.endswith('.git'): | 
|  101             source += '.git' |  104             source += '.git' | 
|  102         subprocess.check_call(['git', 'clone', '--quiet', source, target]) |  105         subprocess.check_call(['git', 'clone', '--quiet', source, target]) | 
|  103  |  106  | 
|  104     def get_revision_id(self, repo, rev='HEAD'): |  107     def get_revision_id(self, repo, rev='HEAD'): | 
| (...skipping 23 matching lines...) | 
|  128         module = os.path.sep + os.path.relpath(target, repo) |  131         module = os.path.sep + os.path.relpath(target, repo) | 
|  129         exclude_file = os.path.join(repo, '.git', 'info', 'exclude') |  132         exclude_file = os.path.join(repo, '.git', 'info', 'exclude') | 
|  130         _ensure_line_exists(exclude_file, module) |  133         _ensure_line_exists(exclude_file, module) | 
|  131  |  134  | 
|  132     def postprocess_url(self, url): |  135     def postprocess_url(self, url): | 
|  133         # Handle alternative syntax of SSH URLS |  136         # Handle alternative syntax of SSH URLS | 
|  134         if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme: |  137         if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme: | 
|  135             return 'ssh://' + url.replace(':', '/', 1) |  138             return 'ssh://' + url.replace(':', '/', 1) | 
|  136         return url |  139         return url | 
|  137  |  140  | 
 |  141  | 
|  138 repo_types = OrderedDict(( |  142 repo_types = OrderedDict(( | 
|  139     ('hg', Mercurial()), |  143     ('hg', Mercurial()), | 
|  140     ('git', Git()), |  144     ('git', Git()), | 
|  141 )) |  145 )) | 
|  142  |  146  | 
|  143 # [vcs:]value |  147 # [vcs:]value | 
|  144 item_regexp = re.compile( |  148 item_regexp = re.compile( | 
|  145     '^(?:(' + '|'.join(map(re.escape, repo_types.keys())) + '):)?' |  149     '^(?:(' + '|'.join(map(re.escape, repo_types.keys())) + '):)?' | 
|  146     '(.+)$' |  150     '(.+)$' | 
|  147 ) |  151 ) | 
| (...skipping 90 matching lines...) | 
|  238     return os.path.join(path, *normpath.split(posixpath.sep)) |  242     return os.path.join(path, *normpath.split(posixpath.sep)) | 
|  239  |  243  | 
|  240  |  244  | 
|  241 def get_repo_type(repo): |  245 def get_repo_type(repo): | 
|  242     for name, repotype in repo_types.iteritems(): |  246     for name, repotype in repo_types.iteritems(): | 
|  243         if repotype.istype(repo): |  247         if repotype.istype(repo): | 
|  244             return name |  248             return name | 
|  245     return 'hg' |  249     return 'hg' | 
|  246  |  250  | 
|  247  |  251  | 
 |  252 def resolve_npm_dependencies(target, vcs): | 
 |  253     """Install Node.js production-only dependencies if necessary and desired. | 
 |  254  | 
|  255     When the target dependency has additional Node.js dependencies declared, | 
 |  256     run "npm install --only=production --loglevel=warn" to resolve the declared | 
 |  257     dependencies. | 
 |  258  | 
 |  259     Additionally, make sure that any VCS will ignore the installed files. | 
 |  260  | 
 |  261     Requires Node.js to be installed locally. | 
 |  262     """ | 
 |  263     try: | 
 |  264         with open(os.path.join(target, 'package.json'), 'r') as fp: | 
 |  265             package_data = json.load(fp) | 
 |  266  | 
 |  267         # In case a package.json does not exist at all or if there are no | 
 |  268         # production dependencies declared, we don't need to run npm and can | 
 |  269         # bail out early. | 
 |  270         if not package_data.get('dependencies', False): | 
 |  271             return | 
 |  272     except IOError: | 
 |  273         return | 
 |  274  | 
 |  275     try: | 
 |  276         # Create an empty file, which gets deleted after successfully | 
 |  277         # installing Node.js dependencies. | 
 |  278         lockfile_path = os.path.join(target, NPM_LOCKFILE) | 
 |  279         open(lockfile_path, 'a').close() | 
 |  280  | 
 |  281         if os.name == 'nt': | 
 |  282             # Windows' CreateProcess() (called by subprocess.Popen()) only | 
|  283             # resolves executables ending in .exe. The Windows installation of | 
|  284             # Node.js only provides an npm.cmd, which is executable but won't | 
 |  285             # be recognized as such by CreateProcess(). | 
 |  286             npm_exec = 'npm.cmd' | 
 |  287         else: | 
 |  288             npm_exec = 'npm' | 
 |  289  | 
 |  290         cmd = [npm_exec, 'install', '--only=production', '--loglevel=warn', | 
 |  291                '--no-package-lock', '--no-optional'] | 
 |  292         subprocess.check_output(cmd, cwd=target) | 
 |  293  | 
 |  294         repo_types[vcs].ignore(os.path.join(target, NPM_LOCKFILE), target) | 
 |  295         repo_types[vcs].ignore(os.path.join(target, 'node_modules'), target) | 
 |  296  | 
 |  297         os.remove(lockfile_path) | 
 |  298     except OSError as e: | 
 |  299         import errno | 
 |  300         if e.errno == errno.ENOENT: | 
 |  301             logging.error('Failed to install Node.js dependencies for %s,' | 
 |  302                           ' please ensure Node.js is installed.', target) | 
 |  303         else: | 
 |  304             raise | 
 |  305  | 
 |  306  | 
|  248 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename): |  307 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename): | 
|  249     if os.path.exists(target): |  308     if os.path.exists(target): | 
|  250         return |  309         return False | 
|  251  |  310  | 
|  252     if SKIP_DEPENDENCY_UPDATES: |  311     if SKIP_DEPENDENCY_UPDATES: | 
|  253         logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, ' |  312         logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, ' | 
|  254                         '%s not cloned', target) |  313                         '%s not cloned', target) | 
|  255         return |  314         return False | 
|  256  |  315  | 
|  257     postprocess_url = repo_types[type].postprocess_url |  316     postprocess_url = repo_types[type].postprocess_url | 
|  258     root = postprocess_url(root) |  317     root = postprocess_url(root) | 
|  259     sourcename = postprocess_url(sourcename) |  318     sourcename = postprocess_url(sourcename) | 
|  260  |  319  | 
|  261     if os.path.exists(root): |  320     if os.path.exists(root): | 
|  262         url = os.path.join(root, sourcename) |  321         url = os.path.join(root, sourcename) | 
|  263     else: |  322     else: | 
|  264         url = urlparse.urljoin(root, sourcename) |  323         url = urlparse.urljoin(root, sourcename) | 
|  265  |  324  | 
|  266     logging.info('Cloning repository %s into %s' % (url, target)) |  325     logging.info('Cloning repository %s into %s' % (url, target)) | 
|  267     repo_types[type].clone(url, target) |  326     repo_types[type].clone(url, target) | 
|  268     repo_types[parenttype].ignore(target, parentrepo) |  327     repo_types[parenttype].ignore(target, parentrepo) | 
 |  328     return True | 
|  269  |  329  | 
|  270  |  330  | 
|  271 def update_repo(target, type, revision): |  331 def update_repo(target, type, revision): | 
|  272     resolved_revision = repo_types[type].get_revision_id(target, revision) |  332     resolved_revision = repo_types[type].get_revision_id(target, revision) | 
|  273     current_revision = repo_types[type].get_revision_id(target) |  333     current_revision = repo_types[type].get_revision_id(target) | 
|  274  |  334  | 
|  275     if resolved_revision != current_revision: |  335     if resolved_revision != current_revision: | 
|  276         if SKIP_DEPENDENCY_UPDATES: |  336         if SKIP_DEPENDENCY_UPDATES: | 
|  277             logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, ' |  337             logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, ' | 
|  278                             '%s not checked out to %s', target, revision) |  338                             '%s not checked out to %s', target, revision) | 
|  279             return |  339             return False | 
|  280  |  340  | 
|  281         if not resolved_revision: |  341         if not resolved_revision: | 
|  282             logging.info('Revision %s is unknown, downloading remote changes' % revision) |  342             logging.info('Revision %s is unknown, downloading remote changes' % revision) | 
|  283             repo_types[type].pull(target) |  343             repo_types[type].pull(target) | 
|  284             resolved_revision = repo_types[type].get_revision_id(target, revision) |  344             resolved_revision = repo_types[type].get_revision_id(target, revision) | 
|  285             if not resolved_revision: |  345             if not resolved_revision: | 
|  286                 raise Exception('Failed to resolve revision %s' % revision) |  346                 raise Exception('Failed to resolve revision %s' % revision) | 
|  287  |  347  | 
|  288         logging.info('Updating repository %s to revision %s' % (target, resolved_revision)) |  348         logging.info('Updating repository %s to revision %s' % (target, resolved_revision)) | 
|  289         repo_types[type].update(target, resolved_revision, revision) |  349         repo_types[type].update(target, resolved_revision, revision) | 
 |  350         return True | 
 |  351     return False | 
|  290  |  352  | 
|  291  |  353  | 
|  292 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): |  354 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): | 
|  293     config = read_deps(repodir) |  355     config = read_deps(repodir) | 
|  294     if config is None: |  356     if config is None: | 
|  295         if level == 0: |  357         if level == 0: | 
|  296             logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE)) |  358             logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE)) | 
|  297         return |  359         return | 
|  298     if level >= 10: |  360     if level >= 10: | 
|  299         logging.warning('Too much subrepository nesting, ignoring %s' % repo) |  361         logging.warning('Too much subrepository nesting, ignoring %s' % repo) | 
|  300         return |  362         return | 
|  301  |  363  | 
|  302     if overrideroots is not None: |  364     if overrideroots is not None: | 
|  303         config['_root'] = overrideroots |  365         config['_root'] = overrideroots | 
|  304  |  366  | 
|  305     for dir, sources in config.iteritems(): |  367     for dir, sources in sorted(config.iteritems()): | 
|  306         if (dir.startswith('_') or |  368         if (dir.startswith('_') or | 
|  307             skipdependencies.intersection([s[0] for s in sources if s[0]])): |  369             skipdependencies.intersection([s[0] for s in sources if s[0]])): | 
|  308             continue |  370             continue | 
|  309  |  371  | 
|  310         target = safe_join(repodir, dir) |  372         target = safe_join(repodir, dir) | 
|  311         parenttype = get_repo_type(repodir) |  373         parenttype = get_repo_type(repodir) | 
|  312         _root = config.get('_root', {}) |  374         _root = config.get('_root', {}) | 
|  313  |  375  | 
|  314         for key in sources.keys() + _root.keys(): |  376         for key in sources.keys() + _root.keys(): | 
|  315             if key == parenttype or key is None and vcs != '*': |  377             if key == parenttype or key is None and vcs != '*': | 
|  316                 vcs = key |  378                 vcs = key | 
|  317         source, rev = merge_seqs(sources.get('*'), sources.get(vcs)) |  379         source, rev = merge_seqs(sources.get('*'), sources.get(vcs)) | 
|  318  |  380  | 
|  319         if not (vcs and source and rev): |  381         if not (vcs and source and rev): | 
|  320             logging.warning('No valid source / revision found to create %s' % target) |  382             logging.warning('No valid source / revision found to create %s' % target) | 
|  321             continue |  383             continue | 
|  322  |  384  | 
|  323         ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source) |  385         repo_cloned = ensure_repo(repodir, parenttype, target, vcs, | 
|  324         update_repo(target, vcs, rev) |  386                                   _root.get(vcs, ''), source) | 
 |  387         repo_updated = update_repo(target, vcs, rev) | 
 |  388         recent_npm_failed = os.path.exists(os.path.join(target, NPM_LOCKFILE)) | 
 |  389         if repo_cloned or repo_updated or recent_npm_failed: | 
 |  390             resolve_npm_dependencies(target, vcs) | 
|  325         resolve_deps(target, level + 1, self_update=False, |  391         resolve_deps(target, level + 1, self_update=False, | 
|  326                      overrideroots=overrideroots, skipdependencies=skipdependencies) |  392                      overrideroots=overrideroots, skipdependencies=skipdependencies) | 
|  327  |  393  | 
|  328     if self_update and '_self' in config and '*' in config['_self']: |  394     if self_update and '_self' in config and '*' in config['_self']: | 
|  329         source = safe_join(repodir, config['_self']['*']) |  395         source = safe_join(repodir, config['_self']['*']) | 
|  330         try: |  396         try: | 
|  331             with io.open(source, 'rb') as handle: |  397             with io.open(source, 'rb') as handle: | 
|  332                 sourcedata = handle.read() |  398                 sourcedata = handle.read() | 
|  333         except IOError as e: |  399         except IOError as e: | 
|  334             if e.errno != errno.ENOENT: |  400             if e.errno != errno.ENOENT: | 
| (...skipping 20 matching lines...) | 
|  355     with open(path, 'a+') as f: |  421     with open(path, 'a+') as f: | 
|  356         f.seek(0, os.SEEK_SET) |  422         f.seek(0, os.SEEK_SET) | 
|  357         file_content = [l.strip() for l in f.readlines()] |  423         file_content = [l.strip() for l in f.readlines()] | 
|  358         if not pattern in file_content: |  424         if not pattern in file_content: | 
|  359             file_content.append(pattern) |  425             file_content.append(pattern) | 
|  360             f.seek(0, os.SEEK_SET) |  426             f.seek(0, os.SEEK_SET) | 
|  361             f.truncate() |  427             f.truncate() | 
|  362             for l in file_content: |  428             for l in file_content: | 
|  363                 print >>f, l |  429                 print >>f, l | 
|  364  |  430  | 
 |  431  | 
|  365 if __name__ == '__main__': |  432 if __name__ == '__main__': | 
|  366     logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) |  433     logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) | 
|  367  |  434  | 
|  368     parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.') |  435     parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.') | 
|  369     parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path') |  436     parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path') | 
|  370     parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output') |  437     parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output') | 
|  371     args = parser.parse_args() |  438     args = parser.parse_args() | 
|  372  |  439  | 
|  373     if args.quiet: |  440     if args.quiet: | 
|  374         logging.disable(logging.INFO) |  441         logging.disable(logging.INFO) | 
|  375  |  442  | 
|  376     repos = args.repos |  443     repos = args.repos | 
|  377     if not len(repos): |  444     if not len(repos): | 
|  378         repos = [os.path.dirname(__file__)] |  445         repos = [os.path.dirname(__file__)] | 
|  379     for repo in repos: |  446     for repo in repos: | 
|  380         resolve_deps(repo) |  447         resolve_deps(repo) | 
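
A minimal standalone sketch (not part of the patch) of the gate that resolve_deps() now applies before calling resolve_npm_dependencies(): npm only runs when the dependency was just cloned, just updated, or a previous npm run crashed and left the .npm_install_lock marker behind. The helper name should_run_npm is hypothetical; only NPM_LOCKFILE comes from the patch.

    import os

    NPM_LOCKFILE = '.npm_install_lock'


    def should_run_npm(target, repo_cloned, repo_updated):
        # A leftover lockfile means a previous "npm install" did not finish,
        # so the install is retried even though the repository is unchanged.
        recent_npm_failed = os.path.exists(os.path.join(target, NPM_LOCKFILE))
        return repo_cloned or repo_updated or recent_npm_failed


    if __name__ == '__main__':
        # Example: nothing cloned or updated, but a stale lockfile forces a retry.
        open(os.path.join('.', NPM_LOCKFILE), 'a').close()
        print(should_run_npm('.', repo_cloned=False, repo_updated=False))  # True
        os.remove(os.path.join('.', NPM_LOCKFILE))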