Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29370933: Issue 4503 - ensure_dependencies.py fails for git submodule
Patch Set: Actual fix: now editing actual repo excludes file Created Jan. 9, 2017, 2:54 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« dependencies ('K') | « dependencies ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8
3 2
4 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 6
8 import sys 7 import sys
9 import os 8 import os
10 import posixpath 9 import posixpath
11 import re 10 import re
12 import io 11 import io
13 import errno 12 import errno
14 import logging 13 import logging
15 import subprocess 14 import subprocess
16 import urlparse 15 import urlparse
17 import argparse 16 import argparse
18 17
19 from collections import OrderedDict 18 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 19 from ConfigParser import RawConfigParser
21 20
22 USAGE = """ 21 USAGE = '''
23 A dependencies file should look like this: 22 A dependencies file should look like this:
24 23
25 # VCS-specific root URLs for the repositories 24 # VCS-specific root URLs for the repositories
26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ 25 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
27 # File to update this script from (optional) 26 # File to update this script from (optional)
28 _self = buildtools/ensure_dependencies.py 27 _self = buildtools/ensure_dependencies.py
29 # Check out elemhidehelper repository into extensions/elemhidehelper directory 28 # Clone elemhidehelper repository into extensions/elemhidehelper directory at
30 # at tag "1.2". 29 # tag "1.2".
31 extensions/elemhidehelper = elemhidehelper 1.2 30 extensions/elemhidehelper = elemhidehelper 1.2
32 # Check out buildtools repository into buildtools directory at VCS-specific 31 # Clone buildtools repository into buildtools directory at VCS-specific
33 # revision IDs. 32 # revision IDs.
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 33 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
35 """ 34 # Clone the adblockplus repository into adblockplus directory, overwriting the
35 # usual source URL for Git repository and specifying VCS specific revision IDs.
36 adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
37 # Clone the adblockpluschrome repository into the adblockpluschrome directory,
38 # from a specific Git repository, specifying the revision ID.
39 adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
40 '''
41
42 SKIP_DEPENDENCY_UPDATES = os.environ.get(
43 'SKIP_DEPENDENCY_UPDATES', ''
44 ).lower() not in ('', '0', 'false')
45
36 46
37 class Mercurial(): 47 class Mercurial():
38 def istype(self, repodir): 48 def istype(self, repodir):
39 return os.path.exists(os.path.join(repodir, ".hg")) 49 return os.path.exists(os.path.join(repodir, '.hg'))
40 50
41 def clone(self, source, target): 51 def clone(self, source, target):
42 if not source.endswith("/"): 52 if not source.endswith('/'):
43 source += "/" 53 source += '/'
44 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target]) 54 subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, target])
45 55
46 def get_revision_id(self, repo, rev=None): 56 def get_revision_id(self, repo, rev=None):
47 command = ["hg", "id", "--repository", repo, "--id"] 57 command = ['hg', 'id', '--repository', repo, '--id']
48 if rev: 58 if rev:
49 command.extend(["--rev", rev]) 59 command.extend(['--rev', rev])
50 60
51 # Ignore stderr output and return code here: if revision lookup failed we 61 # Ignore stderr output and return code here: if revision lookup failed we
52 # should simply return an empty string. 62 # should simply return an empty string.
53 result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] 63 result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
54 return result.strip() 64 return result.strip()
55 65
56 def pull(self, repo): 66 def pull(self, repo):
57 subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"]) 67 subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])
58 68
59 def update(self, repo, rev): 69 def update(self, repo, rev, revname):
60 subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev]) 70 subprocess.check_call(['hg', 'update', '--repository', repo, '--quiet', '--check', '--rev', rev])
61 71
62 def ignore(self, target, repo): 72 def ignore(self, target, repo):
63 73 config_path = os.path.join(repo, '.hg', 'hgrc')
64 if not self.istype(target): 74 ignore_file = os.path.join('.hg', 'dependencies')
65 75 ignore_path = os.path.join(repo, ignore_file)
66 config_path = os.path.join(repo, ".hg", "hgrc") 76
67 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies")) 77 config = RawConfigParser()
68 78 config.read(config_path)
69 config = RawConfigParser() 79
70 config.read(config_path) 80 if not config.has_section('ui'):
71 81 config.add_section('ui')
72 if not config.has_section("ui"): 82
73 config.add_section("ui") 83 config.set('ui', 'ignore.dependencies', ignore_file)
74 84 with open(config_path, 'w') as stream:
75 config.set("ui", "ignore.dependencies", ignore_path) 85 config.write(stream)
76 with open(config_path, "w") as stream: 86
77 config.write(stream) 87 module = os.path.relpath(target, repo)
78 88 _ensure_line_exists(ignore_path, module)
89
90 def postprocess_url(self, url):
91 return url
92
93
94 class Git():
95 def istype(self, repodir):
96 return os.path.exists(os.path.join(repodir, '.git'))
97
98 def clone(self, source, target):
99 source = source.rstrip('/')
100 if not source.endswith('.git'):
101 source += '.git'
102 subprocess.check_call(['git', 'clone', '--quiet', source, target])
103
104 def get_revision_id(self, repo, rev='HEAD'):
105 command = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
106 return subprocess.check_output(command, cwd=repo).strip()
107
108 def pull(self, repo):
109 # Fetch tracked branches, new tags and the list of available remote branches
110 subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'], cwd=repo)
111 # Next we need to ensure all remote branches are tracked
112 newly_tracked = False
113 remotes = subprocess.check_output(['git', 'branch', '--remotes'], cwd=repo)
114 for match in re.finditer(r'^\s*(origin/(\S+))$', remotes, re.M):
115 remote, local = match.groups()
116 with open(os.devnull, 'wb') as devnull:
117 if subprocess.call(['git', 'branch', '--track', local, remote],
118 cwd=repo, stdout=devnull, stderr=devnull) == 0:
119 newly_tracked = True
120 # Finally fetch any newly tracked remote branches
121 if newly_tracked:
122 subprocess.check_call(['git', 'fetch', '--quiet', 'origin'], cwd=repo)
123
124 def update(self, repo, rev, revname):
125 subprocess.check_call(['git', 'checkout', '--quiet', revname], cwd=repo)
126
127 def parse_repo_path(self, dot_git_path):
128 # .git for submodule is a file with actual repo path like so:
129 # gitdir: ../.git/some_path/some_other_path
130 with open(dot_git_path, 'r') as f:
131 return f.read().replace('\n', '')[8:] # skip 'gitdir: ' (8 first characters)
132
133 def ignore(self, target, repo):
79 module = os.path.relpath(target, repo) 134 module = os.path.relpath(target, repo)
80 _ensure_line_exists(ignore_path, module) 135 dot_git_path = os.path.join(repo, ".git")
81 136 exclude_file = os.path.join(dot_git_path, "info", "exclude")
82 class Git(): 137 if os.path.isfile(dot_git_path):
83 def istype(self, repodir): 138 logging.warning("%s seems to be Git submodule" % dot_git_path)
84 return os.path.exists(os.path.join(repodir, ".git")) 139 new_repo = self.parse_repo_path(dot_git_path)
85 140 logging.warning('actual repo path is %s' % new_repo)
86 def clone(self, source, target): 141 exclude_file = os.path.join(new_repo, "info", "exclude")
87 source = source.rstrip("/") 142 _ensure_line_exists(exclude_file, module)
88 if not source.endswith(".git"): 143
89 source += ".git" 144 def postprocess_url(self, url):
90 subprocess.check_call(["git", "clone", "--quiet", source, target]) 145 # Handle alternative syntax of SSH URLS
91 146 if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme:
92 def get_revision_id(self, repo, rev="HEAD"): 147 return 'ssh://' + url.replace(':', '/', 1)
93 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] 148 return url
94 return subprocess.check_output(command, cwd=repo).strip()
95
96 def pull(self, repo):
97 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
98
99 def update(self, repo, rev):
100 subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo)
101
102 def ignore(self, target, repo):
103 module = os.path.relpath(target, repo)
104 exclude_file = os.path.join(repo, ".git", "info", "exclude")
105 _ensure_line_exists(exclude_file, module)
106 149
107 repo_types = OrderedDict(( 150 repo_types = OrderedDict((
108 ("hg", Mercurial()), 151 ('hg', Mercurial()),
109 ("git", Git()), 152 ('git', Git()),
110 )) 153 ))
111 154
155 # [vcs:]value
156 item_regexp = re.compile(
157 '^(?:(' + '|'.join(map(re.escape, repo_types.keys())) + '):)?'
158 '(.+)$'
159 )
160
161 # [url@]rev
162 source_regexp = re.compile(
163 '^(?:(.*)@)?'
164 '(.+)$'
165 )
166
167
168 def merge_seqs(seq1, seq2):
169 """Return a list of any truthy values from the suplied sequences
170
171 (None, 2), (1,) => [1, 2]
172 None, (1, 2) => [1, 2]
173 (1, 2), (3, 4) => [3, 4]
174 """
175 return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ())
176
177
112 def parse_spec(path, line): 178 def parse_spec(path, line):
113 if "=" not in line: 179 if '=' not in line:
114 logging.warning("Invalid line in file %s: %s" % (path, line)) 180 logging.warning('Invalid line in file %s: %s' % (path, line))
115 return None, None 181 return None, None
116 182
117 key, value = line.split("=", 1) 183 key, value = line.split('=', 1)
118 key = key.strip() 184 key = key.strip()
119 items = value.split() 185 items = value.split()
120 if not len(items): 186 if not len(items):
121 logging.warning("No value specified for key %s in file %s" % (key, path)) 187 logging.warning('No value specified for key %s in file %s' % (key, path))
122 return key, None 188 return key, None
123 189
124 result = OrderedDict() 190 result = OrderedDict()
125 if not key.startswith("_"): 191 is_dependency_field = not key.startswith('_')
126 result["_source"] = items.pop(0) 192
127 193 for i, item in enumerate(items):
128 for item in items: 194 try:
129 if ":" in item: 195 vcs, value = re.search(item_regexp, item).groups()
130 type, value = item.split(":", 1) 196 vcs = vcs or '*'
197 if is_dependency_field:
198 if i == 0 and vcs == '*':
199 # In order to be backwards compatible we have to assume that the first
200 # source contains only a URL/path for the repo if it does no t contain
201 # the VCS part
202 url_rev = (value, None)
203 else:
204 url_rev = re.search(source_regexp, value).groups()
205 result[vcs] = merge_seqs(result.get(vcs), url_rev)
206 else:
207 if vcs in result:
208 logging.warning('Ignoring duplicate value for type %r '
209 '(key %r in file %r)' % (vcs, key, path))
210 result[vcs] = value
211 except AttributeError:
212 logging.warning('Ignoring invalid item %r for type %r '
213 '(key %r in file %r)' % (item, vcs, key, path))
214 continue
215 return key, result
216
217
218 def read_deps(repodir):
219 result = {}
220 deps_path = os.path.join(repodir, 'dependencies')
221 try:
222 with io.open(deps_path, 'rt', encoding='utf-8') as handle:
223 for line in handle:
224 # Remove comments and whitespace
225 line = re.sub(r'#.*', '', line).strip()
226 if not line:
227 continue
228
229 key, spec = parse_spec(deps_path, line)
230 if spec:
231 result[key] = spec
232 return result
233 except IOError as e:
234 if e.errno != errno.ENOENT:
235 raise
236 return None
237
238
239 def safe_join(path, subpath):
240 # This has been inspired by Flask's safe_join() function
241 forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
242 if any(sep in subpath for sep in forbidden):
243 raise Exception('Illegal directory separator in dependency path %s' % subpath)
244
245 normpath = posixpath.normpath(subpath)
246 if posixpath.isabs(normpath):
247 raise Exception('Dependency path %s cannot be absolute' % subpath)
248 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
249 raise Exception('Dependency path %s has to be inside the repository' % subpath)
250 return os.path.join(path, *normpath.split(posixpath.sep))
251
252
253 def get_repo_type(repo):
254 for name, repotype in repo_types.iteritems():
255 if repotype.istype(repo):
256 return name
257 return 'hg'
258
259
260 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
261 if os.path.exists(target):
262 return
263
264 if SKIP_DEPENDENCY_UPDATES:
265 logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
266 '%s not cloned', target)
267 return
268
269 postprocess_url = repo_types[type].postprocess_url
270 root = postprocess_url(root)
271 sourcename = postprocess_url(sourcename)
272
273 if os.path.exists(root):
274 url = os.path.join(root, sourcename)
131 else: 275 else:
132 type, value = ("*", item) 276 url = urlparse.urljoin(root, sourcename)
133 if type in result: 277
134 logging.warning("Ignoring duplicate value for type %s (key %s in file %s)" % (type, key, path)) 278 logging.info('Cloning repository %s into %s' % (url, target))
135 else: 279 repo_types[type].clone(url, target)
136 result[type] = value 280 repo_types[parenttype].ignore(target, parentrepo)
137 return key, result 281
138 282
139 def read_deps(repodir): 283 def update_repo(target, type, revision):
140 result = {}
141 deps_path = os.path.join(repodir, "dependencies")
142 try:
143 with io.open(deps_path, "rt", encoding="utf-8") as handle:
144 for line in handle:
145 # Remove comments and whitespace
146 line = re.sub(r"#.*", "", line).strip()
147 if not line:
148 continue
149
150 key, spec = parse_spec(deps_path, line)
151 if spec:
152 result[key] = spec
153 return result
154 except IOError, e:
155 if e.errno != errno.ENOENT:
156 raise
157 return None
158
159 def safe_join(path, subpath):
160 # This has been inspired by Flask's safe_join() function
161 forbidden = set([os.sep, os.altsep]) - set([posixpath.sep, None])
162 if any(sep in subpath for sep in forbidden):
163 raise Exception("Illegal directory separator in dependency path %s" % subpath)
164
165 normpath = posixpath.normpath(subpath)
166 if posixpath.isabs(normpath):
167 raise Exception("Dependency path %s cannot be absolute" % subpath)
168 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
169 raise Exception("Dependency path %s has to be inside the repository" % subpath)
170 return os.path.join(path, *normpath.split(posixpath.sep))
171
172 def get_repo_type(repo):
173 for name, repotype in repo_types.iteritems():
174 if repotype.istype(repo):
175 return name
176 return None
177
178 def ensure_repo(parentrepo, target, roots, sourcename):
179 if os.path.exists(target):
180 return
181
182 parenttype = get_repo_type(parentrepo)
183 type = None
184 for key in roots:
185 if key == parenttype or (key in repo_types and type is None):
186 type = key
187 if type is None:
188 raise Exception("No valid source found to create %s" % target)
189
190 if os.path.exists(roots[type]):
191 url = os.path.join(roots[type], sourcename)
192 else:
193 url = urlparse.urljoin(roots[type], sourcename)
194
195 logging.info("Cloning repository %s into %s" % (url, target))
196 repo_types[type].clone(url, target)
197
198 for repo in repo_types.itervalues():
199 if repo.istype(parentrepo):
200 repo.ignore(target, parentrepo)
201
202 def update_repo(target, revisions):
203 type = get_repo_type(target)
204 if type is None:
205 logging.warning("Type of repository %s unknown, skipping update" % target)
206 return
207
208 if type in revisions:
209 revision = revisions[type]
210 elif "*" in revisions:
211 revision = revisions["*"]
212 else:
213 logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type))
214 return
215
216 resolved_revision = repo_types[type].get_revision_id(target, revision)
217 if not resolved_revision:
218 logging.info("Revision %s is unknown, downloading remote changes" % revision)
219 repo_types[type].pull(target)
220 resolved_revision = repo_types[type].get_revision_id(target, revision) 284 resolved_revision = repo_types[type].get_revision_id(target, revision)
221 if not resolved_revision: 285 current_revision = repo_types[type].get_revision_id(target)
222 raise Exception("Failed to resolve revision %s" % revision) 286
223 287 if resolved_revision != current_revision:
224 current_revision = repo_types[type].get_revision_id(target) 288 if SKIP_DEPENDENCY_UPDATES:
225 if resolved_revision != current_revision: 289 logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
226 logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
227 repo_types[type].update(target, resolved_revision) 291 return
292
293 if not resolved_revision:
294 logging.info('Revision %s is unknown, downloading remote changes' % revision)
295 repo_types[type].pull(target)
296 resolved_revision = repo_types[type].get_revision_id(target, revision)
297 if not resolved_revision:
298 raise Exception('Failed to resolve revision %s' % revision)
299
300 logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
301 repo_types[type].update(target, resolved_revision, revision)
302
228 303
229 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): 304 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
230 config = read_deps(repodir) 305 config = read_deps(repodir)
231 if config is None: 306 if config is None:
232 if level == 0: 307 if level == 0:
233 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE)) 308 logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
234 return 309 return
235 if level >= 10: 310 if level >= 10:
236 logging.warning("Too much subrepository nesting, ignoring %s" % repo) 311 logging.warning('Too much subrepository nesting, ignoring %s' % repo)
237 312 return
238 if overrideroots is not None: 313
239 config["_root"] = overrideroots 314 if overrideroots is not None:
240 315 config['_root'] = overrideroots
241 for dir, revisions in config.iteritems(): 316
242 if dir.startswith("_") or revisions["_source"] in skipdependencies: 317 for dir, sources in config.iteritems():
243 continue 318 if (dir.startswith('_') or
244 target = safe_join(repodir, dir) 319 skipdependencies.intersection([s[0] for s in sources if s[0]])):
245 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) 320 continue
246 update_repo(target, revisions) 321
247 resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroots, skipdependencies=skipdependencies) 322 target = safe_join(repodir, dir)
248 323 parenttype = get_repo_type(repodir)
249 if self_update and "_self" in config and "*" in config["_self"]: 324 _root = config.get('_root', {})
250 source = safe_join(repodir, config["_self"]["*"]) 325
251 try: 326 for key in sources.keys() + _root.keys():
252 with io.open(source, "rb") as handle: 327 if key == parenttype or key is None and vcs != '*':
253 sourcedata = handle.read() 328 vcs = key
254 except IOError, e: 329 source, rev = merge_seqs(sources.get('*'), sources.get(vcs))
255 if e.errno != errno.ENOENT: 330
256 raise 331 if not (vcs and source and rev):
257 logging.warning("File %s doesn't exist, skipping self-update" % source) 332 logging.warning('No valid source / revision found to create %s' % ta rget)
258 return 333 continue
259 334
260 target = __file__ 335 ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source )
261 with io.open(target, "rb") as handle: 336 update_repo(target, vcs, rev)
262 targetdata = handle.read() 337 resolve_deps(target, level + 1, self_update=False,
263 338 overrideroots=overrideroots, skipdependencies=skipdependencies)
264 if sourcedata != targetdata: 339
265 logging.info("Updating %s from %s, don't forget to commit" % (source, target)) 340 if self_update and '_self' in config and '*' in config['_self']:
266 with io.open(target, "wb") as handle: 341 source = safe_join(repodir, config['_self']['*'])
267 handle.write(sourcedata) 342 try:
268 if __name__ == "__main__": 343 with io.open(source, 'rb') as handle:
269 logging.info("Restarting %s" % target) 344 sourcedata = handle.read()
270 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:]) 345 except IOError as e:
271 else: 346 if e.errno != errno.ENOENT:
272 logging.warning("Cannot restart %s automatically, please rerun" % target) 347 raise
348 logging.warning("File %s doesn't exist, skipping self-update" % source)
349 return
350
351 target = __file__
352 with io.open(target, 'rb') as handle:
353 targetdata = handle.read()
354
355 if sourcedata != targetdata:
356 logging.info("Updating %s from %s, don't forget to commit" % (target, source))
357 with io.open(target, 'wb') as handle:
358 handle.write(sourcedata)
359 if __name__ == '__main__':
360 logging.info('Restarting %s' % target)
361 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
362 else:
363 logging.warning('Cannot restart %s automatically, please rerun' % target)
364
273 365
274 def _ensure_line_exists(path, pattern): 366 def _ensure_line_exists(path, pattern):
275 with open(path, 'a+') as f: 367 with open(path, 'a+') as f:
276 file_content = [l.strip() for l in f.readlines()] 368 f.seek(0, os.SEEK_SET)
277 if not pattern in file_content: 369 file_content = [l.strip() for l in f.readlines()]
278 file_content.append(pattern) 370 if not pattern in file_content:
279 f.seek(0, os.SEEK_SET) 371 file_content.append(pattern)
280 f.truncate() 372 f.seek(0, os.SEEK_SET)
281 for l in file_content: 373 f.truncate()
282 print >>f, l 374 for l in file_content:
283 375 print >>f, l
284 if __name__ == "__main__": 376
285 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) 377 if __name__ == '__main__':
286 378 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
287 parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.") 379
288 parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path") 380 parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
289 parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output") 381 parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
290 args = parser.parse_args() 382 parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
291 383 args = parser.parse_args()
292 if args.quiet: 384
293 logging.disable(logging.INFO) 385 if args.quiet:
294 386 logging.disable(logging.INFO)
295 repos = args.repos 387
296 if not len(repos): 388 repos = args.repos
297 repos = [os.path.dirname(__file__)] 389 if not len(repos):
298 for repo in repos: 390 repos = [os.path.dirname(__file__)]
299 resolve_deps(repo) 391 for repo in repos:
392 resolve_deps(repo)
OLDNEW
« dependencies ('K') | « dependencies ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld