Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29589691: Issue 5757 - Update buildtools dependency, (re)move legacy extensions (Closed)
Patch Set: Created Oct. 26, 2017, 10:50 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
Vasily Kuznetsov 2017/11/30 17:34:00 I have not properly reviewed this script, assuming
tlucas 2017/11/30 18:15:26 Yes, this is the result of ensure_dependencies.py'
2 2
3 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
4 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
5 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
6 6
7 import sys 7 import sys
8 import os 8 import os
9 import posixpath 9 import posixpath
10 import re 10 import re
11 import io 11 import io
12 import errno 12 import errno
13 import logging 13 import logging
14 import subprocess 14 import subprocess
15 import urlparse 15 import urlparse
16 import argparse 16 import argparse
17 import json
17 18
18 from collections import OrderedDict 19 from collections import OrderedDict
19 from ConfigParser import RawConfigParser 20 from ConfigParser import RawConfigParser
20 21
21 USAGE = ''' 22 USAGE = '''
22 A dependencies file should look like this: 23 A dependencies file should look like this:
23 24
24 # VCS-specific root URLs for the repositories 25 # VCS-specific root URLs for the repositories
25 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ 26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
26 # File to update this script from (optional) 27 # File to update this script from (optional)
27 _self = buildtools/ensure_dependencies.py 28 _self = buildtools/ensure_dependencies.py
28 # Clone elemhidehelper repository into extensions/elemhidehelper directory at 29 # Clone elemhidehelper repository into extensions/elemhidehelper directory at
29 # tag "1.2". 30 # tag "1.2".
30 extensions/elemhidehelper = elemhidehelper 1.2 31 extensions/elemhidehelper = elemhidehelper 1.2
31 # Clone buildtools repository into buildtools directory at VCS-specific 32 # Clone buildtools repository into buildtools directory at VCS-specific
32 # revision IDs. 33 # revision IDs.
33 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
34 # Clone the adblockplus repository into adblockplus directory, overwriting the 35 # Clone the adblockplus repository into adblockplus directory, overwriting the
35 # usual source URL for Git repository and specifying VCS specific revision IDs . 36 # usual source URL for Git repository and specifying VCS specific revision IDs .
36 adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus. git@b2ffd52b 37 adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus. git@b2ffd52b
37 # Clone the adblockpluschrome repository into the adblockpluschrome directory, 38 # Clone the adblockpluschrome repository into the adblockpluschrome directory,
38 # from a specific Git repository, specifying the revision ID. 39 # from a specific Git repository, specifying the revision ID.
39 adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7 40 adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
40 ''' 41 '''
41 42
42 SKIP_DEPENDENCY_UPDATES = os.environ.get( 43 SKIP_DEPENDENCY_UPDATES = os.environ.get(
43 'SKIP_DEPENDENCY_UPDATES', '' 44 'SKIP_DEPENDENCY_UPDATES', ''
44 ).lower() not in ('', '0', 'false') 45 ).lower() not in ('', '0', 'false')
45 46
47 NPM_LOCKFILE = '.npm_install_lock'
48
46 49
47 class Mercurial(): 50 class Mercurial():
48 def istype(self, repodir): 51 def istype(self, repodir):
49 return os.path.exists(os.path.join(repodir, '.hg')) 52 return os.path.exists(os.path.join(repodir, '.hg'))
50 53
51 def clone(self, source, target): 54 def clone(self, source, target):
52 if not source.endswith('/'): 55 if not source.endswith('/'):
53 source += '/' 56 source += '/'
54 subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, t arget]) 57 subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, t arget])
55 58
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
128 module = os.path.sep + os.path.relpath(target, repo) 131 module = os.path.sep + os.path.relpath(target, repo)
129 exclude_file = os.path.join(repo, '.git', 'info', 'exclude') 132 exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
130 _ensure_line_exists(exclude_file, module) 133 _ensure_line_exists(exclude_file, module)
131 134
132 def postprocess_url(self, url): 135 def postprocess_url(self, url):
133 # Handle alternative syntax of SSH URLS 136 # Handle alternative syntax of SSH URLS
134 if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme: 137 if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme:
135 return 'ssh://' + url.replace(':', '/', 1) 138 return 'ssh://' + url.replace(':', '/', 1)
136 return url 139 return url
137 140
141
# Registry of supported VCS backends. Order matters: get_repo_type() probes
# the backends in this order when guessing a checkout's type.
repo_types = OrderedDict((
    ('hg', Mercurial()),
    ('git', Git()),
))

# Matches a dependency item of the form [vcs:]value, capturing the optional
# VCS prefix in group 1 and the value in group 2.
item_regexp = re.compile(
    '^(?:(' + '|'.join(map(re.escape, repo_types.keys())) + '):)?'
    '(.+)$'
)
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after
238 return os.path.join(path, *normpath.split(posixpath.sep)) 242 return os.path.join(path, *normpath.split(posixpath.sep))
239 243
240 244
def get_repo_type(repo):
    """Return the name of the VCS managing *repo*, defaulting to 'hg'."""
    # Probe each known backend in registration order; Mercurial is the
    # fallback when no backend recognizes the directory.
    matches = (name for name, handler in repo_types.iteritems()
               if handler.istype(repo))
    return next(matches, 'hg')
246 250
247 251
def resolve_npm_dependencies(target, vcs):
    """Install Node.js production-only dependencies if necessary and desired.

    When the target dependency has additional Node.js dependencies declared
    run "npm install --only=production --loglevel=warn" to resolve the declared
    dependencies.

    Additionally, make sure that any VCS will ignore the installed files.

    Requires Node.js to be installed locally.
    """
    try:
        with open(os.path.join(target, 'package.json'), 'r') as fp:
            package_data = json.load(fp)

        # In case a package.json does not exist at all or if there are no
        # production dependencies declared, we don't need to run npm and can
        # bail out early.
        if not package_data.get('dependencies', False):
            return
    except IOError:
        return

    try:
        # Create an empty file, which gets deleted after successfully
        # installing Node.js dependencies. Its presence marks an earlier
        # failed or interrupted install, so the next run retries npm.
        lockfile_path = os.path.join(target, NPM_LOCKFILE)
        open(lockfile_path, 'a').close()

        cmd = ['npm', 'install', '--only=production', '--loglevel=warn',
               '--no-package-lock', '--no-optional']
        subprocess.check_output(cmd, cwd=target)

        repo_types[vcs].ignore(os.path.join(target, NPM_LOCKFILE), target)
        repo_types[vcs].ignore(os.path.join(target, 'node_modules'), target)

        os.remove(lockfile_path)
    except OSError as e:
        # ENOENT here means the "npm" executable itself was not found.
        # NOTE: "errno" is already imported at module level, so the former
        # redundant local "import errno" was removed.
        if e.errno == errno.ENOENT:
            logging.error('Failed to install Node.js dependencies for %s,'
                          ' please ensure Node.js is installed.', target)
        else:
            raise
297
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    """Clone *target* from its source when missing; return True if cloned."""
    # Nothing to do when the checkout already exists.
    if os.path.exists(target):
        return False

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return False

    postprocess = repo_types[type].postprocess_url
    root = postprocess(root)
    sourcename = postprocess(sourcename)

    # A root that exists on disk is a local directory tree; otherwise it is
    # treated as a remote base URL.
    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    repo_types[type].clone(url, target)
    # Keep the freshly cloned subrepository out of the parent's VCS.
    repo_types[parenttype].ignore(target, parentrepo)
    return True
269 320
270 321
def update_repo(target, type, revision):
    """Check out *revision* in *target*; return True if the checkout moved."""
    backend = repo_types[type]
    resolved_revision = backend.get_revision_id(target, revision)
    current_revision = backend.get_revision_id(target)

    # Guard clause: already at the requested revision, nothing to do.
    if resolved_revision == current_revision:
        return False

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not checked out to %s', target, revision)
        return False

    if not resolved_revision:
        # The revision is not known locally yet; fetch remote changes and
        # retry resolving it once.
        logging.info('Revision %s is unknown, downloading remote changes' % revision)
        backend.pull(target)
        resolved_revision = backend.get_revision_id(target, revision)
        if not resolved_revision:
            raise Exception('Failed to resolve revision %s' % revision)

    logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
    backend.update(target, resolved_revision, revision)
    return True
290 343
291 344
292 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdep endencies=set()): 345 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdep endencies=set()):
293 config = read_deps(repodir) 346 config = read_deps(repodir)
294 if config is None: 347 if config is None:
295 if level == 0: 348 if level == 0:
296 logging.warning('No dependencies file in directory %s, nothing to do ...\n%s' % (repodir, USAGE)) 349 logging.warning('No dependencies file in directory %s, nothing to do ...\n%s' % (repodir, USAGE))
297 return 350 return
298 if level >= 10: 351 if level >= 10:
299 logging.warning('Too much subrepository nesting, ignoring %s' % repo) 352 logging.warning('Too much subrepository nesting, ignoring %s' % repo)
300 return 353 return
301 354
302 if overrideroots is not None: 355 if overrideroots is not None:
303 config['_root'] = overrideroots 356 config['_root'] = overrideroots
304 357
305 for dir, sources in config.iteritems(): 358 for dir, sources in sorted(config.iteritems()):
306 if (dir.startswith('_') or 359 if (dir.startswith('_') or
307 skipdependencies.intersection([s[0] for s in sources if s[0]])): 360 skipdependencies.intersection([s[0] for s in sources if s[0]])):
308 continue 361 continue
309 362
310 target = safe_join(repodir, dir) 363 target = safe_join(repodir, dir)
311 parenttype = get_repo_type(repodir) 364 parenttype = get_repo_type(repodir)
312 _root = config.get('_root', {}) 365 _root = config.get('_root', {})
313 366
314 for key in sources.keys() + _root.keys(): 367 for key in sources.keys() + _root.keys():
315 if key == parenttype or key is None and vcs != '*': 368 if key == parenttype or key is None and vcs != '*':
316 vcs = key 369 vcs = key
317 source, rev = merge_seqs(sources.get('*'), sources.get(vcs)) 370 source, rev = merge_seqs(sources.get('*'), sources.get(vcs))
318 371
319 if not (vcs and source and rev): 372 if not (vcs and source and rev):
320 logging.warning('No valid source / revision found to create %s' % ta rget) 373 logging.warning('No valid source / revision found to create %s' % ta rget)
321 continue 374 continue
322 375
323 ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source ) 376 repo_cloned = ensure_repo(repodir, parenttype, target, vcs,
324 update_repo(target, vcs, rev) 377 _root.get(vcs, ''), source)
378 repo_updated = update_repo(target, vcs, rev)
379 recent_npm_failed = os.path.exists(os.path.join(target, NPM_LOCKFILE))
380 if repo_cloned or repo_updated or recent_npm_failed:
381 resolve_npm_dependencies(target, vcs)
325 resolve_deps(target, level + 1, self_update=False, 382 resolve_deps(target, level + 1, self_update=False,
326 overrideroots=overrideroots, skipdependencies=skipdependenc ies) 383 overrideroots=overrideroots, skipdependencies=skipdependenc ies)
327 384
328 if self_update and '_self' in config and '*' in config['_self']: 385 if self_update and '_self' in config and '*' in config['_self']:
329 source = safe_join(repodir, config['_self']['*']) 386 source = safe_join(repodir, config['_self']['*'])
330 try: 387 try:
331 with io.open(source, 'rb') as handle: 388 with io.open(source, 'rb') as handle:
332 sourcedata = handle.read() 389 sourcedata = handle.read()
333 except IOError as e: 390 except IOError as e:
334 if e.errno != errno.ENOENT: 391 if e.errno != errno.ENOENT:
(...skipping 20 matching lines...) Expand all
355 with open(path, 'a+') as f: 412 with open(path, 'a+') as f:
356 f.seek(0, os.SEEK_SET) 413 f.seek(0, os.SEEK_SET)
357 file_content = [l.strip() for l in f.readlines()] 414 file_content = [l.strip() for l in f.readlines()]
358 if not pattern in file_content: 415 if not pattern in file_content:
359 file_content.append(pattern) 416 file_content.append(pattern)
360 f.seek(0, os.SEEK_SET) 417 f.seek(0, os.SEEK_SET)
361 f.truncate() 418 f.truncate()
362 for l in file_content: 419 for l in file_content:
363 print >>f, l 420 print >>f, l
364 421
422
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    # Command-line interface: zero or more repository paths, defaulting to
    # the directory containing this script.
    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    repos = args.repos or [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
OLDNEW

Powered by Google App Engine
This is Rietveld