Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29399743: Issue 3768 - Check (and fix, if needed) multilocale builds (Closed)
Patch Set: Created March 31, 2017, 9:34 a.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« dependencies ('K') | « dependencies ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8 2 # coding: utf-8
anton 2017/04/03 06:28:11 it seems to be not the latest revision body for 'e
diegocarloslima 2017/04/03 09:57:09 It uses the revision stored in adblockbrowser to u
3 3
4 # This Source Code Form is subject to the terms of the Mozilla Public 4 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 5 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 7
8 import sys 8 import sys
9 import os 9 import os
10 import posixpath 10 import posixpath
11 import re 11 import re
12 import io 12 import io
13 import errno 13 import errno
14 import logging 14 import logging
15 import subprocess 15 import subprocess
16 import urlparse 16 import urlparse
17 import argparse 17 import argparse
18 18
19 from collections import OrderedDict 19 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 20 from ConfigParser import RawConfigParser
21 21
22 USAGE = """ 22 USAGE = """
23 A dependencies file should look like this: 23 A dependencies file should look like this:
24 24
25 # VCS-specific root URLs for the repositories 25 # VCS-specific root URLs for the repositories
26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ 26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
27 # File to update this script from (optional) 27 # File to update this script from (optional)
28 _self = buildtools/ensure_dependencies.py 28 _self = buildtools/ensure_dependencies.py
29 # Check out elemhidehelper repository into extensions/elemhidehelper directory 29 # Clone elemhidehelper repository into extensions/elemhidehelper directory at
30 # at tag "1.2". 30 # tag "1.2".
31 extensions/elemhidehelper = elemhidehelper 1.2 31 extensions/elemhidehelper = elemhidehelper 1.2
32 # Check out buildtools repository into buildtools directory at VCS-specific 32 # Clone buildtools repository into buildtools directory at VCS-specific
33 # revision IDs. 33 # revision IDs.
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
35 # Clone the adblockplus repository into adblockplus directory, overwriting the
36 # usual source URL for Git repository and specifying VCS specific revision IDs.
37 adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
38 # Clone the adblockpluschrome repository into the adblockpluschrome directory,
39 # from a specific Git repository, specifying the revision ID.
40 adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
35 """ 41 """
36 42
43 SKIP_DEPENDENCY_UPDATES = os.environ.get(
44 "SKIP_DEPENDENCY_UPDATES", ""
45 ).lower() not in ("", "0", "false")
46
47
37 class Mercurial(): 48 class Mercurial():
38 def istype(self, repodir): 49 def istype(self, repodir):
39 return os.path.exists(os.path.join(repodir, ".hg")) 50 return os.path.exists(os.path.join(repodir, ".hg"))
40 51
41 def clone(self, source, target): 52 def clone(self, source, target):
42 if not source.endswith("/"): 53 if not source.endswith("/"):
43 source += "/" 54 source += "/"
44 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target]) 55 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])
45 56
46 def get_revision_id(self, repo, rev=None): 57 def get_revision_id(self, repo, rev=None):
47 command = ["hg", "id", "--repository", repo, "--id"] 58 command = ["hg", "id", "--repository", repo, "--id"]
48 if rev: 59 if rev:
49 command.extend(["--rev", rev]) 60 command.extend(["--rev", rev])
50 61
51 # Ignore stderr output and return code here: if revision lookup failed we 62 # Ignore stderr output and return code here: if revision lookup failed we
52 # should simply return an empty string. 63 # should simply return an empty string.
53 result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] 64 result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
54 return result.strip() 65 return result.strip()
55 66
56 def pull(self, repo): 67 def pull(self, repo):
57 subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"]) 68 subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"])
58 69
59 def update(self, repo, rev): 70 def update(self, repo, rev, revname):
60 subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev]) 71 subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev])
61 72
62 def ignore(self, target, repo): 73 def ignore(self, target, repo):
63 74
64 if not self.istype(target): 75 if not self.istype(target):
65 76
66 config_path = os.path.join(repo, ".hg", "hgrc") 77 config_path = os.path.join(repo, ".hg", "hgrc")
67 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies")) 78 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies"))
68 79
69 config = RawConfigParser() 80 config = RawConfigParser()
70 config.read(config_path) 81 config.read(config_path)
71 82
72 if not config.has_section("ui"): 83 if not config.has_section("ui"):
73 config.add_section("ui") 84 config.add_section("ui")
74 85
75 config.set("ui", "ignore.dependencies", ignore_path) 86 config.set("ui", "ignore.dependencies", ignore_path)
76 with open(config_path, "w") as stream: 87 with open(config_path, "w") as stream:
77 config.write(stream) 88 config.write(stream)
78 89
79 module = os.path.relpath(target, repo) 90 module = os.path.relpath(target, repo)
80 _ensure_line_exists(ignore_path, module) 91 _ensure_line_exists(ignore_path, module)
81 92
82 def postprocess_url(self, url): 93 def postprocess_url(self, url):
83 return url 94 return url
95
84 96
85 class Git(): 97 class Git():
86 def istype(self, repodir): 98 def istype(self, repodir):
87 return os.path.exists(os.path.join(repodir, ".git")) 99 return os.path.exists(os.path.join(repodir, ".git"))
88 100
89 def clone(self, source, target): 101 def clone(self, source, target):
90 source = source.rstrip("/") 102 source = source.rstrip("/")
91 if not source.endswith(".git"): 103 if not source.endswith(".git"):
92 source += ".git" 104 source += ".git"
93 subprocess.check_call(["git", "clone", "--quiet", source, target]) 105 subprocess.check_call(["git", "clone", "--quiet", source, target])
94 106
95 def get_revision_id(self, repo, rev="HEAD"): 107 def get_revision_id(self, repo, rev="HEAD"):
96 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] 108 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}']
97 return subprocess.check_output(command, cwd=repo).strip() 109 return subprocess.check_output(command, cwd=repo).strip()
98 110
99 def pull(self, repo): 111 def pull(self, repo):
100 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo) 112 # Fetch tracked branches, new tags and the list of available remote branches
101 113 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
102 def update(self, repo, rev): 114 # Next we need to ensure all remote branches are tracked
103 subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo) 115 newly_tracked = False
105 116 remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
105 def ignore(self, target, repo): 117 for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
106 module = os.path.relpath(target, repo) 118 remote, local = match.groups()
107 exclude_file = os.path.join(repo, ".git", "info", "exclude") 119 with open(os.devnull, "wb") as devnull:
108 _ensure_line_exists(exclude_file, module) 120 if subprocess.call(["git", "branch", "--track", local, remote],
109 121 cwd=repo, stdout=devnull, stderr=devnull) == 0:
110 def postprocess_url(self, url): 122 newly_tracked = True
111 # Handle alternative syntax of SSH URLS 123 # Finally fetch any newly tracked remote branches
112 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme: 124 if newly_tracked:
113 return "ssh://" + url.replace(":", "/", 1) 125 subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)
114 return url 126
127 def update(self, repo, rev, revname):
128 subprocess.check_call(["git", "checkout", "--quiet", revname], cwd=repo)
129
130 def ignore(self, target, repo):
131 module = os.path.sep + os.path.relpath(target, repo)
132 exclude_file = os.path.join(repo, ".git", "info", "exclude")
133 _ensure_line_exists(exclude_file, module)
134
135 def postprocess_url(self, url):
136 # Handle alternative syntax of SSH URLS
137 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
138 return "ssh://" + url.replace(":", "/", 1)
139 return url
115 140
116 repo_types = OrderedDict(( 141 repo_types = OrderedDict((
117 ("hg", Mercurial()), 142 ("hg", Mercurial()),
118 ("git", Git()), 143 ("git", Git()),
119 )) 144 ))
120 145
146 # [vcs:]value
147 item_regexp = re.compile(
148 "^(?:(" + "|".join(map(re.escape, repo_types.keys())) + "):)?"
149 "(.+)$"
150 )
151
152 # [url@]rev
153 source_regexp = re.compile(
154 "^(?:(.*)@)?"
155 "(.+)$"
156 )
157
158
159 def merge_seqs(seq1, seq2):
160 """Return a list of any truthy values from the suplied sequences
161
162 (None, 2), (1,) => [1, 2]
163 None, (1, 2) => [1, 2]
164 (1, 2), (3, 4) => [3, 4]
165 """
166 return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ())
167
168
121 def parse_spec(path, line): 169 def parse_spec(path, line):
122 if "=" not in line: 170 if "=" not in line:
123 logging.warning("Invalid line in file %s: %s" % (path, line)) 171 logging.warning("Invalid line in file %s: %s" % (path, line))
124 return None, None 172 return None, None
125 173
126 key, value = line.split("=", 1) 174 key, value = line.split("=", 1)
127 key = key.strip() 175 key = key.strip()
128 items = value.split() 176 items = value.split()
129 if not len(items): 177 if not len(items):
130 logging.warning("No value specified for key %s in file %s" % (key, path)) 178 logging.warning("No value specified for key %s in file %s" % (key, path))
131 return key, None 179 return key, None
132 180
133 result = OrderedDict() 181 result = OrderedDict()
134 if not key.startswith("_"): 182 is_dependency_field = not key.startswith("_")
135 result["_source"] = items.pop(0) 183
136 184 for i, item in enumerate(items):
137 for item in items: 185 try:
138 if ":" in item: 186 vcs, value = re.search(item_regexp, item).groups()
139 type, value = item.split(":", 1) 187 vcs = vcs or "*"
188 if is_dependency_field:
189 if i == 0 and vcs == "*":
190 # In order to be backwards compatible we have to assume that the first
191 # source contains only a URL/path for the repo if it does not contain
192 # the VCS part
193 url_rev = (value, None)
194 else:
195 url_rev = re.search(source_regexp, value).groups()
196 result[vcs] = merge_seqs(result.get(vcs), url_rev)
197 else:
198 if vcs in result:
199 logging.warning("Ignoring duplicate value for type %r "
200 "(key %r in file %r)" % (vcs, key, path))
201 result[vcs] = value
202 except AttributeError:
203 logging.warning("Ignoring invalid item %r for type %r "
204 "(key %r in file %r)" % (item, vcs, key, path))
205 continue
206 return key, result
207
208
209 def read_deps(repodir):
210 result = {}
211 deps_path = os.path.join(repodir, "dependencies")
212 try:
213 with io.open(deps_path, "rt", encoding="utf-8") as handle:
214 for line in handle:
215 # Remove comments and whitespace
216 line = re.sub(r"#.*", "", line).strip()
217 if not line:
218 continue
219
220 key, spec = parse_spec(deps_path, line)
221 if spec:
222 result[key] = spec
223 return result
224 except IOError, e:
225 if e.errno != errno.ENOENT:
226 raise
227 return None
228
229
230 def safe_join(path, subpath):
231 # This has been inspired by Flask's safe_join() function
232 forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
233 if any(sep in subpath for sep in forbidden):
234 raise Exception("Illegal directory separator in dependency path %s" % subpath)
235
236 normpath = posixpath.normpath(subpath)
237 if posixpath.isabs(normpath):
238 raise Exception("Dependency path %s cannot be absolute" % subpath)
239 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
240 raise Exception("Dependency path %s has to be inside the repository" % subpath)
241 return os.path.join(path, *normpath.split(posixpath.sep))
242
243
244 def get_repo_type(repo):
245 for name, repotype in repo_types.iteritems():
246 if repotype.istype(repo):
247 return name
248 return "hg"
249
250
251 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
252 if os.path.exists(target):
253 return
254
255 if SKIP_DEPENDENCY_UPDATES:
256 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
257 "%s not cloned", target)
258 return
259
260 postprocess_url = repo_types[type].postprocess_url
261 root = postprocess_url(root)
262 sourcename = postprocess_url(sourcename)
263
264 if os.path.exists(root):
265 url = os.path.join(root, sourcename)
140 else: 266 else:
141 type, value = ("*", item) 267 url = urlparse.urljoin(root, sourcename)
142 if type in result: 268
143 logging.warning("Ignoring duplicate value for type %s (key %s in file %s)" % (type, key, path)) 269 logging.info("Cloning repository %s into %s" % (url, target))
144 else: 270 repo_types[type].clone(url, target)
145 result[type] = value 271 repo_types[parenttype].ignore(target, parentrepo)
146 return key, result 272
147 273
148 def read_deps(repodir): 274 def update_repo(target, type, revision):
149 result = {}
150 deps_path = os.path.join(repodir, "dependencies")
151 try:
152 with io.open(deps_path, "rt", encoding="utf-8") as handle:
153 for line in handle:
154 # Remove comments and whitespace
155 line = re.sub(r"#.*", "", line).strip()
156 if not line:
157 continue
158
159 key, spec = parse_spec(deps_path, line)
160 if spec:
161 result[key] = spec
162 return result
163 except IOError, e:
164 if e.errno != errno.ENOENT:
165 raise
166 return None
167
168 def safe_join(path, subpath):
169 # This has been inspired by Flask's safe_join() function
170 forbidden = set([os.sep, os.altsep]) - set([posixpath.sep, None])
171 if any(sep in subpath for sep in forbidden):
172 raise Exception("Illegal directory separator in dependency path %s" % subpath)
173
174 normpath = posixpath.normpath(subpath)
175 if posixpath.isabs(normpath):
176 raise Exception("Dependency path %s cannot be absolute" % subpath)
177 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
178 raise Exception("Dependency path %s has to be inside the repository" % subpath)
179 return os.path.join(path, *normpath.split(posixpath.sep))
180
181 def get_repo_type(repo):
182 for name, repotype in repo_types.iteritems():
183 if repotype.istype(repo):
184 return name
185 return None
186
187 def ensure_repo(parentrepo, target, roots, sourcename):
188 if os.path.exists(target):
189 return
190
191 parenttype = get_repo_type(parentrepo)
192 type = None
193 for key in roots:
194 if key == parenttype or (key in repo_types and type is None):
195 type = key
196 if type is None:
197 raise Exception("No valid source found to create %s" % target)
198
199 postprocess_url = repo_types[type].postprocess_url
200 root = postprocess_url(roots[type])
201 sourcename = postprocess_url(sourcename)
202
203 if os.path.exists(root):
204 url = os.path.join(root, sourcename)
205 else:
206 url = urlparse.urljoin(root, sourcename)
207
208 logging.info("Cloning repository %s into %s" % (url, target))
209 repo_types[type].clone(url, target)
210
211 for repo in repo_types.itervalues():
212 if repo.istype(parentrepo):
213 repo.ignore(target, parentrepo)
214
215 def update_repo(target, revisions):
216 type = get_repo_type(target)
217 if type is None:
218 logging.warning("Type of repository %s unknown, skipping update" % target)
219 return
220
221 if type in revisions:
222 revision = revisions[type]
223 elif "*" in revisions:
224 revision = revisions["*"]
225 else:
226 logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type))
227 return
228
229 resolved_revision = repo_types[type].get_revision_id(target, revision)
230 if not resolved_revision:
231 logging.info("Revision %s is unknown, downloading remote changes" % revision)
232 repo_types[type].pull(target)
233 resolved_revision = repo_types[type].get_revision_id(target, revision) 275 resolved_revision = repo_types[type].get_revision_id(target, revision)
234 if not resolved_revision: 276 current_revision = repo_types[type].get_revision_id(target)
235 raise Exception("Failed to resolve revision %s" % revision) 277
236 278 if resolved_revision != current_revision:
237 current_revision = repo_types[type].get_revision_id(target) 279 if SKIP_DEPENDENCY_UPDATES:
238 if resolved_revision != current_revision: 280 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
239 logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
240 repo_types[type].update(target, resolved_revision) 282 return
283
284 if not resolved_revision:
285 logging.info("Revision %s is unknown, downloading remote changes" % revision)
286 repo_types[type].pull(target)
287 resolved_revision = repo_types[type].get_revision_id(target, revision)
288 if not resolved_revision:
289 raise Exception("Failed to resolve revision %s" % revision)
290
291 logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
292 repo_types[type].update(target, resolved_revision, revision)
293
241 294
242 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()): 295 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
243 config = read_deps(repodir) 296 config = read_deps(repodir)
244 if config is None: 297 if config is None:
245 if level == 0: 298 if level == 0:
246 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE)) 299 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
247 return 300 return
248 if level >= 10: 301 if level >= 10:
249 logging.warning("Too much subrepository nesting, ignoring %s" % repo) 302 logging.warning("Too much subrepository nesting, ignoring %s" % repo)
250 303 return
251 if overrideroots is not None: 304
252 config["_root"] = overrideroots 305 if overrideroots is not None:
253 306 config["_root"] = overrideroots
254 for dir, revisions in config.iteritems(): 307
255 if dir.startswith("_") or revisions["_source"] in skipdependencies: 308 for dir, sources in config.iteritems():
256 continue 309 if (dir.startswith("_") or
257 target = safe_join(repodir, dir) 310 skipdependencies.intersection([s[0] for s in sources if s[0]])):
258 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) 311 continue
259 update_repo(target, revisions) 312
260 resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroots, skipdependencies=skipdependencies) 313 target = safe_join(repodir, dir)
261 314 parenttype = get_repo_type(repodir)
262 if self_update and "_self" in config and "*" in config["_self"]: 315 _root = config.get("_root", {})
263 source = safe_join(repodir, config["_self"]["*"]) 316
264 try: 317 for key in sources.keys() + _root.keys():
265 with io.open(source, "rb") as handle: 318 if key == parenttype or key is None and vcs != "*":
266 sourcedata = handle.read() 319 vcs = key
267 except IOError, e: 320 source, rev = merge_seqs(sources.get("*"), sources.get(vcs))
268 if e.errno != errno.ENOENT: 321
269 raise 322 if not (vcs and source and rev):
270 logging.warning("File %s doesn't exist, skipping self-update" % source) 323 logging.warning("No valid source / revision found to create %s" % target)
271 return 324 continue
272 325
273 target = __file__ 326 ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
274 with io.open(target, "rb") as handle: 327 update_repo(target, vcs, rev)
275 targetdata = handle.read() 328 resolve_deps(target, level + 1, self_update=False,
276 329 overrideroots=overrideroots, skipdependencies=skipdependencies)
277 if sourcedata != targetdata: 330
277 278 logging.info("Updating %s from %s, don't forget to commit" % (source, target)) 331 if self_update and "_self" in config and "*" in config["_self"]:
279 with io.open(target, "wb") as handle: 332 source = safe_join(repodir, config["_self"]["*"])
280 handle.write(sourcedata) 333 try:
281 if __name__ == "__main__": 334 with io.open(source, "rb") as handle:
282 logging.info("Restarting %s" % target) 335 sourcedata = handle.read()
283 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:]) 336 except IOError, e:
284 else: 337 if e.errno != errno.ENOENT:
285 logging.warning("Cannot restart %s automatically, please rerun" % target) 338 raise
339 logging.warning("File %s doesn't exist, skipping self-update" % source)
340 return
341
342 target = __file__
343 with io.open(target, "rb") as handle:
344 targetdata = handle.read()
345
346 if sourcedata != targetdata:
346 logging.info("Updating %s from %s, don't forget to commit" % (target, source))
348 with io.open(target, "wb") as handle:
349 handle.write(sourcedata)
350 if __name__ == "__main__":
351 logging.info("Restarting %s" % target)
283 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:]) 352 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
353 else:
354 logging.warning("Cannot restart %s automatically, please rerun" % target)
355
286 356
287 def _ensure_line_exists(path, pattern): 357 def _ensure_line_exists(path, pattern):
288 with open(path, 'a+') as f: 358 with open(path, 'a+') as f:
289 file_content = [l.strip() for l in f.readlines()] 359 file_content = [l.strip() for l in f.readlines()]
290 if not pattern in file_content: 360 if not pattern in file_content:
291 file_content.append(pattern) 361 file_content.append(pattern)
292 f.seek(0, os.SEEK_SET) 362 f.seek(0, os.SEEK_SET)
293 f.truncate() 363 f.truncate()
294 for l in file_content: 364 for l in file_content:
295 print >>f, l 365 print >>f, l
296 366
297 if __name__ == "__main__": 367 if __name__ == "__main__":
298 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) 368 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
299 369
300 parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.") 370 parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.")
301 parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path") 371 parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path")
302 parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output") 372 parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output")
303 args = parser.parse_args() 373 args = parser.parse_args()
304 374
305 if args.quiet: 375 if args.quiet:
306 logging.disable(logging.INFO) 376 logging.disable(logging.INFO)
307 377
308 repos = args.repos 378 repos = args.repos
309 if not len(repos): 379 if not len(repos):
310 repos = [os.path.dirname(__file__)] 380 repos = [os.path.dirname(__file__)]
311 for repo in repos: 381 for repo in repos:
312 resolve_deps(repo) 382 resolve_deps(repo)
OLDNEW
« dependencies ('K') | « dependencies ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld