Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29370933: Issue 4503 - ensure_dependencies.py fails for git submodule
Patch Set: Just updated to hg:3e083509a284 / git:f4957fd Created Jan. 9, 2017, 2:44 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« no previous file with comments | « dependencies ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8
3 2
4 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 6
8 import sys 7 import sys
9 import os 8 import os
10 import posixpath 9 import posixpath
11 import re 10 import re
12 import io 11 import io
13 import errno 12 import errno
14 import logging 13 import logging
15 import subprocess 14 import subprocess
16 import urlparse 15 import urlparse
17 import argparse 16 import argparse
18 17
19 from collections import OrderedDict 18 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 19 from ConfigParser import RawConfigParser
21 20
# Usage text printed when no dependencies file is found; documents the
# expected format of a "dependencies" file.
USAGE = '''
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
'''

# True when the SKIP_DEPENDENCY_UPDATES environment variable is set to
# anything other than '', '0' or 'false' (case-insensitive); used to make
# this script a no-op in environments that manage dependencies themselves.
SKIP_DEPENDENCY_UPDATES = os.environ.get(
    'SKIP_DEPENDENCY_UPDATES', ''
).lower() not in ('', '0', 'false')
36 46
class Mercurial():
    """Repository handler implementing the VCS interface for Mercurial."""

    def istype(self, repodir):
        # A Mercurial checkout is recognised by its .hg subdirectory.
        return os.path.exists(os.path.join(repodir, '.hg'))

    def clone(self, source, target):
        # Mercurial wants a trailing slash on the source URL.
        if not source.endswith('/'):
            source += '/'
        command = ['hg', 'clone', '--quiet', '--noupdate', source, target]
        subprocess.check_call(command)

    def get_revision_id(self, repo, rev=None):
        command = ['hg', 'id', '--repository', repo, '--id']
        if rev:
            command += ['--rev', rev]

        # Ignore stderr output and return code here: if revision lookup
        # failed we should simply return an empty string.
        process = subprocess.Popen(command, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        output = process.communicate()[0]
        return output.strip()

    def pull(self, repo):
        subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])

    def update(self, repo, rev, revname):
        # revname is unused here; the parameter exists for interface
        # parity with Git.update().
        command = ['hg', 'update', '--repository', repo, '--quiet',
                   '--check', '--rev', rev]
        subprocess.check_call(command)

    def ignore(self, target, repo):
        """Register *target* in a repo-local ignore file wired into hgrc."""
        config_path = os.path.join(repo, '.hg', 'hgrc')
        ignore_file = os.path.join('.hg', 'dependencies')
        ignore_path = os.path.join(repo, ignore_file)

        parser = RawConfigParser()
        parser.read(config_path)
        if not parser.has_section('ui'):
            parser.add_section('ui')
        parser.set('ui', 'ignore.dependencies', ignore_file)
        with open(config_path, 'w') as stream:
            parser.write(stream)

        _ensure_line_exists(ignore_path, os.path.relpath(target, repo))

    def postprocess_url(self, url):
        # Mercurial URLs need no rewriting.
        return url
81 93
class Git():
    """Repository handler implementing the VCS interface for Git."""

    def istype(self, repodir):
        # A Git checkout is recognised by its .git entry.
        return os.path.exists(os.path.join(repodir, '.git'))

    def clone(self, source, target):
        source = source.rstrip('/')
        if not source.endswith('.git'):
            source += '.git'
        subprocess.check_call(['git', 'clone', '--quiet', source, target])

    def get_revision_id(self, repo, rev='HEAD'):
        command = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
        return subprocess.check_output(command, cwd=repo).strip()

    def pull(self, repo):
        """Fetch from origin and make every remote branch locally tracked."""
        # Fetch tracked branches, new tags and the list of available
        # remote branches.
        subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'],
                              cwd=repo)
        # Next we need to ensure all remote branches are tracked.
        added_any = False
        listing = subprocess.check_output(['git', 'branch', '--remotes'],
                                          cwd=repo)
        for found in re.finditer(r'^\s*(origin/(\S+))$', listing, re.M):
            remote_name, local_name = found.groups()
            with open(os.devnull, 'wb') as sink:
                status = subprocess.call(
                    ['git', 'branch', '--track', local_name, remote_name],
                    cwd=repo, stdout=sink, stderr=sink)
            if status == 0:
                added_any = True
        # Finally fetch any newly tracked remote branches.
        if added_any:
            subprocess.check_call(['git', 'fetch', '--quiet', 'origin'],
                                  cwd=repo)

    def update(self, repo, rev, revname):
        subprocess.check_call(['git', 'checkout', '--quiet', revname], cwd=repo)

    def ignore(self, target, repo):
        entry = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
        _ensure_line_exists(exclude_file, entry)

    def postprocess_url(self, url):
        # Handle alternative syntax of SSH URLS (scp-like user@host:path).
        looks_like_scp = '@' in url and ':' in url
        if looks_like_scp and not urlparse.urlsplit(url).scheme:
            return 'ssh://' + url.replace(':', '/', 1)
        return url
106 137
# Registry of supported version control systems, in priority order.
repo_types = OrderedDict([
    ('hg', Mercurial()),
    ('git', Git()),
])

# Matches an "[vcs:]value" item; group 1 is the optional VCS name,
# group 2 the value.
item_regexp = re.compile(
    '^(?:(%s):)?(.+)$' % '|'.join(map(re.escape, repo_types.keys()))
)

# Matches a "[url@]rev" source; group 1 is the optional URL,
# group 2 the revision.
source_regexp = re.compile(
    '^(?:(.*)@)?(.+)$'
)
154
155
def merge_seqs(seq1, seq2):
    """Return a list of any truthy values from the supplied sequences.

    Items of seq2 win over the corresponding items of seq1; a None
    sequence counts as empty.

    (None, 2), (1,) => [1, 2]
    None, (1, 2) => [1, 2]
    (1, 2), (3, 4) => [3, 4]
    """
    def pick(first, second):
        # With two sequences, Python 2's map() pads the shorter with None.
        return second or first
    return map(pick, seq1 or (), seq2 or ())
164
165
def parse_spec(path, line):
    """Parse one non-comment line of a dependencies file.

    Returns (key, spec) where spec is an OrderedDict keyed by VCS name
    ('*' meaning "any VCS").  For dependency keys each value is a merged
    [url, revision] pair; for '_'-prefixed keys it is the raw value.
    Returns (None, None) for lines without '=' and (key, None) for keys
    without a value.
    """
    if '=' not in line:
        logging.warning('Invalid line in file %s: %s' % (path, line))
        return None, None

    key, value = line.split('=', 1)
    key = key.strip()
    items = value.split()
    if not len(items):
        logging.warning('No value specified for key %s in file %s' % (key, path))
        return key, None

    result = OrderedDict()
    is_dependency_field = not key.startswith('_')

    for index, item in enumerate(items):
        try:
            vcs, value = re.search(item_regexp, item).groups()
            vcs = vcs or '*'
            if not is_dependency_field:
                if vcs in result:
                    logging.warning('Ignoring duplicate value for type %r '
                                    '(key %r in file %r)' % (vcs, key, path))
                result[vcs] = value
            elif index == 0 and vcs == '*':
                # In order to be backwards compatible we have to assume that the first
                # source contains only a URL/path for the repo if it does not contain
                # the VCS part
                result[vcs] = merge_seqs(result.get(vcs), (value, None))
            else:
                url_rev = re.search(source_regexp, value).groups()
                result[vcs] = merge_seqs(result.get(vcs), url_rev)
        except AttributeError:
            logging.warning('Ignoring invalid item %r for type %r '
                            '(key %r in file %r)' % (item, vcs, key, path))
            continue
    return key, result
204
205
def read_deps(repodir):
    """Read and parse the "dependencies" file of *repodir*.

    Returns a dict mapping dependency keys to their parsed specs, or None
    when the file does not exist.  Any other I/O error propagates.
    """
    deps_path = os.path.join(repodir, 'dependencies')
    try:
        handle = io.open(deps_path, 'rt', encoding='utf-8')
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return None

    result = {}
    with handle:
        for raw_line in handle:
            # Remove comments and whitespace
            stripped = re.sub(r'#.*', '', raw_line).strip()
            if not stripped:
                continue
            key, spec = parse_spec(deps_path, stripped)
            if spec:
                result[key] = spec
    return result
226
def safe_join(path, subpath):
    """Join *subpath* (POSIX-style, repository-relative) onto *path*,
    rejecting separators, absolute paths and parent-directory escapes.

    This has been inspired by Flask's safe_join() function.
    """
    forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
    for sep in forbidden:
        if sep in subpath:
            raise Exception('Illegal directory separator in dependency path %s' % subpath)

    normpath = posixpath.normpath(subpath)
    if posixpath.isabs(normpath):
        raise Exception('Dependency path %s cannot be absolute' % subpath)
    escapes_repo = (normpath == posixpath.pardir or
                    normpath.startswith(posixpath.pardir + posixpath.sep))
    if escapes_repo:
        raise Exception('Dependency path %s has to be inside the repository' % subpath)
    return os.path.join(path, *normpath.split(posixpath.sep))
240
def get_repo_type(repo):
    """Return the name of the VCS managing *repo*, defaulting to 'hg'
    when no known checkout marker is found."""
    for name, handler in repo_types.items():
        if handler.istype(repo):
            return name
    return 'hg'
247
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    """Clone the dependency into *target* if it is not present yet and
    register it in the parent repository's ignore list."""
    if os.path.exists(target):
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return

    # Let the target VCS normalize both halves of the clone URL
    # (e.g. rewrite scp-style Git addresses).
    handler = repo_types[type]
    root = handler.postprocess_url(root)
    sourcename = handler.postprocess_url(sourcename)

    # A root that exists locally is joined as a filesystem path,
    # anything else as a URL.
    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    handler.clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)
138 270
def update_repo(target, type, revision):
    """Check out *revision* in the repository at *target*, pulling from the
    remote first when the revision is not known locally.

    Does nothing when the working copy is already at the requested
    revision; raises when the revision cannot be resolved even after a
    pull.
    """
    handler = repo_types[type]
    resolved_revision = handler.get_revision_id(target, revision)
    current_revision = handler.get_revision_id(target)

    if resolved_revision == current_revision:
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not checked out to %s', target, revision)
        return

    if not resolved_revision:
        logging.info('Revision %s is unknown, downloading remote changes' % revision)
        handler.pull(target)
        resolved_revision = handler.get_revision_id(target, revision)
        if not resolved_revision:
            raise Exception('Failed to resolve revision %s' % revision)

    logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
    handler.update(target, resolved_revision, revision)
228 291
def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
    """Recursively ensure all dependencies of *repodir* are cloned and
    checked out at the configured revisions.

    level limits nesting depth; overrideroots replaces the _root section;
    skipdependencies is a set of source URLs to leave untouched.  When
    self_update is true and the config names a _self file, this script
    replaces itself with that file and restarts.
    """
    config = read_deps(repodir)
    if config is None:
        if level == 0:
            logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
        return
    if level >= 10:
        # Bug fix: this previously referenced the undefined name `repo`,
        # raising NameError instead of emitting the warning.
        logging.warning('Too much subrepository nesting, ignoring %s' % repodir)
        return

    if overrideroots is not None:
        config['_root'] = overrideroots

    for dir, sources in config.iteritems():
        # Bug fix: compare the dependency source URLs (first element of
        # each merged [url, rev] pair) against skipdependencies; iterating
        # `sources` directly yielded VCS names, so s[0] was a single
        # character and the skip list never matched.
        if (dir.startswith('_') or
                skipdependencies.intersection(
                    [s[0] for s in sources.values() if s[0]])):
            continue

        target = safe_join(repodir, dir)
        parenttype = get_repo_type(repodir)
        _root = config.get('_root', {})

        # Bug fix: reset `vcs` for every dependency; previously its value
        # could leak over from the preceding iteration (or be undefined on
        # the first one), selecting the wrong repository type.
        vcs = None
        for key in sources.keys() + _root.keys():
            if key == parenttype or (key is None and vcs != '*'):
                vcs = key
        # Guard the unpack: merge_seqs returns fewer than two items when
        # neither a '*' nor a matching VCS entry exists.
        url_rev = merge_seqs(sources.get('*'), sources.get(vcs))
        source, rev = url_rev if len(url_rev) == 2 else (None, None)

        if not (vcs and source and rev):
            logging.warning('No valid source / revision found to create %s' % target)
            continue

        ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source)
        update_repo(target, vcs, rev)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and '_self' in config and '*' in config['_self']:
        source = safe_join(repodir, config['_self']['*'])
        try:
            with io.open(source, 'rb') as handle:
                sourcedata = handle.read()
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, 'rb') as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, 'wb') as handle:
                handle.write(sourcedata)
            if __name__ == '__main__':
                logging.info('Restarting %s' % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning('Cannot restart %s automatically, please rerun' % target)
273 353
274 def _ensure_line_exists(path, pattern): 354 def _ensure_line_exists(path, pattern):
275 with open(path, 'a+') as f: 355 with open(path, 'a+') as f:
276 file_content = [l.strip() for l in f.readlines()] 356 f.seek(0, os.SEEK_SET)
277 if not pattern in file_content: 357 file_content = [l.strip() for l in f.readlines()]
278 file_content.append(pattern) 358 if not pattern in file_content:
279 f.seek(0, os.SEEK_SET) 359 file_content.append(pattern)
280 f.truncate() 360 f.seek(0, os.SEEK_SET)
281 for l in file_content: 361 f.truncate()
282 print >>f, l 362 for l in file_content:
283 363 print >>f, l
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    # Default to the directory containing this script when no repository
    # paths were given on the command line.
    repos = args.repos or [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
OLDNEW
« no previous file with comments | « dependencies ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld