Rietveld Code Review Tool

Side by Side Diff: ensure_dependencies.py

Issue 29341151: Issue 4019 - Added "Edge" to platform choices in Issues tracker at issues1. (Closed)
Patch Set: Created May 10, 2016, 3:35 p.m.
OLD | NEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8 2 # coding: utf-8
3 3
4 # This Source Code Form is subject to the terms of the Mozilla Public 4 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 5 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 7
8 import sys 8 import sys
9 import os 9 import os
10 import posixpath 10 import posixpath
11 import re 11 import re
12 import io 12 import io
13 import errno 13 import errno
14 import logging 14 import logging
15 import subprocess 15 import subprocess
16 import urlparse 16 import urlparse
17 import argparse 17 import argparse
18 18
19 from collections import OrderedDict 19 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 20 from ConfigParser import RawConfigParser
21 21
22 USAGE = """ 22 USAGE = """
23 A dependencies file should look like this: 23 A dependencies file should look like this:
24 24
25 # VCS-specific root URLs for the repositories 25 # VCS-specific root URLs for the repositories
26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ 26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
27 # File to update this script from (optional) 27 # File to update this script from (optional)
28 _self = buildtools/ensure_dependencies.py 28 _self = buildtools/ensure_dependencies.py
29 # Clone elemhidehelper repository into extensions/elemhidehelper directory at 29 # Check out elemhidehelper repository into extensions/elemhidehelper directory
30 # tag "1.2". 30 # at tag "1.2".
31 extensions/elemhidehelper = elemhidehelper 1.2 31 extensions/elemhidehelper = elemhidehelper 1.2
32 # Clone buildtools repository into buildtools directory at VCS-specific 32 # Check out buildtools repository into buildtools directory at VCS-specific
33 # revision IDs. 33 # revision IDs.
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
35 # Clone the adblockplus repository into adblockplus directory, overwriting the
36 # usual source URL for Git repository and specifying VCS specific revision IDs.
37 adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
38 # Clone the adblockpluschrome repository into the adblockpluschrome directory,
39 # from a specific Git repository, specifying the revision ID.
40 adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
41 """ 35 """
42 36
43 SKIP_DEPENDENCY_UPDATES = os.environ.get( 37 SKIP_DEPENDENCY_UPDATES = os.environ.get(
44 "SKIP_DEPENDENCY_UPDATES", "" 38 "SKIP_DEPENDENCY_UPDATES", ""
45 ).lower() not in ("", "0", "false") 39 ).lower() not in ("", "0", "false")
46 40
47
48 class Mercurial(): 41 class Mercurial():
49 def istype(self, repodir): 42 def istype(self, repodir):
50 return os.path.exists(os.path.join(repodir, ".hg")) 43 return os.path.exists(os.path.join(repodir, ".hg"))
51 44
52 def clone(self, source, target): 45 def clone(self, source, target):
53 if not source.endswith("/"): 46 if not source.endswith("/"):
54 source += "/" 47 source += "/"
55 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target]) 48 subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])
56 49
57 def get_revision_id(self, repo, rev=None): 50 def get_revision_id(self, repo, rev=None):
58 command = ["hg", "id", "--repository", repo, "--id"] 51 command = ["hg", "id", "--repository", repo, "--id"]
59 if rev: 52 if rev:
60 command.extend(["--rev", rev]) 53 command.extend(["--rev", rev])
61 54
62 # Ignore stderr output and return code here: if revision lookup failed we 55 # Ignore stderr output and return code here: if revision lookup failed we
63 # should simply return an empty string. 56 # should simply return an empty string.
64 result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] 57 result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
65 return result.strip() 58 return result.strip()
66 59
67 def pull(self, repo): 60 def pull(self, repo):
68 subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"]) 61 subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"])
69 62
70 def update(self, repo, rev, revname): 63 def update(self, repo, rev):
71 subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev]) 64 subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev])
72 65
73 def ignore(self, target, repo): 66 def ignore(self, target, repo):
74 67
75 if not self.istype(target): 68 if not self.istype(target):
76 69
77 config_path = os.path.join(repo, ".hg", "hgrc") 70 config_path = os.path.join(repo, ".hg", "hgrc")
78 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies")) 71 ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies"))
79 72
80 config = RawConfigParser() 73 config = RawConfigParser()
81 config.read(config_path) 74 config.read(config_path)
82 75
83 if not config.has_section("ui"): 76 if not config.has_section("ui"):
84 config.add_section("ui") 77 config.add_section("ui")
85 78
86 config.set("ui", "ignore.dependencies", ignore_path) 79 config.set("ui", "ignore.dependencies", ignore_path)
87 with open(config_path, "w") as stream: 80 with open(config_path, "w") as stream:
88 config.write(stream) 81 config.write(stream)
89 82
90 module = os.path.relpath(target, repo) 83 module = os.path.relpath(target, repo)
91 _ensure_line_exists(ignore_path, module) 84 _ensure_line_exists(ignore_path, module)
92 85
93 def postprocess_url(self, url): 86 def postprocess_url(self, url):
94 return url 87 return url
95
96 88
97 class Git(): 89 class Git():
98 def istype(self, repodir): 90 def istype(self, repodir):
99 return os.path.exists(os.path.join(repodir, ".git")) 91 return os.path.exists(os.path.join(repodir, ".git"))
100 92
101 def clone(self, source, target): 93 def clone(self, source, target):
102 source = source.rstrip("/") 94 source = source.rstrip("/")
103 if not source.endswith(".git"): 95 if not source.endswith(".git"):
104 source += ".git" 96 source += ".git"
105 subprocess.check_call(["git", "clone", "--quiet", source, target]) 97 subprocess.check_call(["git", "clone", "--quiet", source, target])
106 98
107 def get_revision_id(self, repo, rev="HEAD"): 99 def get_revision_id(self, repo, rev="HEAD"):
108 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}'] 100 command = ["git", "rev-parse", "--revs-only", rev + '^{commit}']
109 return subprocess.check_output(command, cwd=repo).strip() 101 return subprocess.check_output(command, cwd=repo).strip()
110 102
111 def pull(self, repo): 103 def pull(self, repo):
112 # Fetch tracked branches, new tags and the list of available remote branches 104 # Fetch tracked branches, new tags and the list of available remote branches
113 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo) 105 subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
114 # Next we need to ensure all remote branches are tracked 106 # Next we need to ensure all remote branches are tracked
115 newly_tracked = False 107 newly_tracked = False
116 remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo) 108 remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
117 for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M): 109 for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
118 remote, local = match.groups() 110 remote, local = match.groups()
119 with open(os.devnull, "wb") as devnull: 111 with open(os.devnull, "wb") as devnull:
120 if subprocess.call(["git", "branch", "--track", local, remote], 112 if subprocess.call(["git", "branch", "--track", local, remote],
121 cwd=repo, stdout=devnull, stderr=devnull) == 0: 113 cwd=repo, stdout=devnull, stderr=devnull) == 0:
122 newly_tracked = True 114 newly_tracked = True
123 # Finally fetch any newly tracked remote branches 115 # Finally fetch any newly tracked remote branches
124 if newly_tracked: 116 if newly_tracked:
125 subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo) 117 subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)
126 118
127 def update(self, repo, rev, revname): 119 def update(self, repo, rev):
128 subprocess.check_call(["git", "checkout", "--quiet", revname], cwd=repo) 120 subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo)
129 121
130 def ignore(self, target, repo): 122 def ignore(self, target, repo):
131 module = os.path.sep + os.path.relpath(target, repo) 123 module = os.path.relpath(target, repo)
132 exclude_file = os.path.join(repo, ".git", "info", "exclude") 124 exclude_file = os.path.join(repo, ".git", "info", "exclude")
133 _ensure_line_exists(exclude_file, module) 125 _ensure_line_exists(exclude_file, module)
134 126
135 def postprocess_url(self, url): 127 def postprocess_url(self, url):
136 # Handle alternative syntax of SSH URLS 128 # Handle alternative syntax of SSH URLS
137 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme: 129 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
138 return "ssh://" + url.replace(":", "/", 1) 130 return "ssh://" + url.replace(":", "/", 1)
139 return url 131 return url
140 132
141 repo_types = OrderedDict(( 133 repo_types = OrderedDict((
142 ("hg", Mercurial()), 134 ("hg", Mercurial()),
143 ("git", Git()), 135 ("git", Git()),
144 )) 136 ))
145 137
146 # [vcs:]value
147 item_regexp = re.compile(
148 "^(?:(" + "|".join(map(re.escape, repo_types.keys())) + "):)?"
149 "(.+)$"
150 )
151
152 # [url@]rev
153 source_regexp = re.compile(
154 "^(?:(.*)@)?"
155 "(.+)$"
156 )
157
158
159 def merge_seqs(seq1, seq2):
160 """Return a list of any truthy values from the suplied sequences
161
162 (None, 2), (1,) => [1, 2]
163 None, (1, 2) => [1, 2]
164 (1, 2), (3, 4) => [3, 4]
165 """
166 return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ())
167
168
169 def parse_spec(path, line): 138 def parse_spec(path, line):
170 if "=" not in line: 139 if "=" not in line:
171 logging.warning("Invalid line in file %s: %s" % (path, line)) 140 logging.warning("Invalid line in file %s: %s" % (path, line))
172 return None, None 141 return None, None
173 142
174 key, value = line.split("=", 1) 143 key, value = line.split("=", 1)
175 key = key.strip() 144 key = key.strip()
176 items = value.split() 145 items = value.split()
177 if not len(items): 146 if not len(items):
178 logging.warning("No value specified for key %s in file %s" % (key, path)) 147 logging.warning("No value specified for key %s in file %s" % (key, path))
179 return key, None 148 return key, None
180 149
181 result = OrderedDict() 150 result = OrderedDict()
182 is_dependency_field = not key.startswith("_") 151 if not key.startswith("_"):
183 152 result["_source"] = items.pop(0)
184 for i, item in enumerate(items): 153
185 try: 154 for item in items:
186 vcs, value = re.search(item_regexp, item).groups() 155 if ":" in item:
187 vcs = vcs or "*" 156 type, value = item.split(":", 1)
188 if is_dependency_field: 157 else:
189 if i == 0 and vcs == "*": 158 type, value = ("*", item)
190 # In order to be backwards compatible we have to assume that the first 159 if type in result:
191 # source contains only a URL/path for the repo if it does not contain 160 logging.warning("Ignoring duplicate value for type %s (key %s in file %s)" % (type, key, path))
192 # the VCS part 161 else:
193 url_rev = (value, None) 162 result[type] = value
194 else: 163 return key, result
195 url_rev = re.search(source_regexp, value).groups()
196 result[vcs] = merge_seqs(result.get(vcs), url_rev)
197 else:
198 if vcs in result:
199 logging.warning("Ignoring duplicate value for type %r "
200 "(key %r in file %r)" % (vcs, key, path))
201 result[vcs] = value
202 except AttributeError:
203 logging.warning("Ignoring invalid item %r for type %r "
204 "(key %r in file %r)" % (item, vcs, key, path))
205 continue
206 return key, result
207
208 164
209 def read_deps(repodir): 165 def read_deps(repodir):
210 result = {} 166 result = {}
211 deps_path = os.path.join(repodir, "dependencies") 167 deps_path = os.path.join(repodir, "dependencies")
168 try:
169 with io.open(deps_path, "rt", encoding="utf-8") as handle:
170 for line in handle:
171 # Remove comments and whitespace
172 line = re.sub(r"#.*", "", line).strip()
173 if not line:
174 continue
175
176 key, spec = parse_spec(deps_path, line)
177 if spec:
178 result[key] = spec
179 return result
180 except IOError, e:
181 if e.errno != errno.ENOENT:
182 raise
183 return None
184
185 def safe_join(path, subpath):
186 # This has been inspired by Flask's safe_join() function
187 forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
188 if any(sep in subpath for sep in forbidden):
189 raise Exception("Illegal directory separator in dependency path %s" % subpath)
190
191 normpath = posixpath.normpath(subpath)
192 if posixpath.isabs(normpath):
193 raise Exception("Dependency path %s cannot be absolute" % subpath)
194 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
195 raise Exception("Dependency path %s has to be inside the repository" % subpath)
196 return os.path.join(path, *normpath.split(posixpath.sep))
197
198 def get_repo_type(repo):
199 for name, repotype in repo_types.iteritems():
200 if repotype.istype(repo):
201 return name
202 return None
203
204 def ensure_repo(parentrepo, target, roots, sourcename):
205 if os.path.exists(target):
206 return
207
208 if SKIP_DEPENDENCY_UPDATES:
209 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
210 "%s not cloned", target)
211 return
212
213 parenttype = get_repo_type(parentrepo)
214 type = None
215 for key in roots:
216 if key == parenttype or (key in repo_types and type is None):
217 type = key
218 if type is None:
219 raise Exception("No valid source found to create %s" % target)
220
221 postprocess_url = repo_types[type].postprocess_url
222 root = postprocess_url(roots[type])
223 sourcename = postprocess_url(sourcename)
224
225 if os.path.exists(root):
226 url = os.path.join(root, sourcename)
227 else:
228 url = urlparse.urljoin(root, sourcename)
229
230 logging.info("Cloning repository %s into %s" % (url, target))
231 repo_types[type].clone(url, target)
232
233 for repo in repo_types.itervalues():
234 if repo.istype(parentrepo):
235 repo.ignore(target, parentrepo)
236
237 def update_repo(target, revisions):
238 type = get_repo_type(target)
239 if type is None:
240 logging.warning("Type of repository %s unknown, skipping update" % target)
241 return
242
243 if type in revisions:
244 revision = revisions[type]
245 elif "*" in revisions:
246 revision = revisions["*"]
247 else:
248 logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type))
249 return
250
251 resolved_revision = repo_types[type].get_revision_id(target, revision)
252 current_revision = repo_types[type].get_revision_id(target)
253
254 if resolved_revision != current_revision:
255 if SKIP_DEPENDENCY_UPDATES:
256 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
257 "%s not checked out to %s", target, revision)
258 return
259
260 if not resolved_revision:
261 logging.info("Revision %s is unknown, downloading remote changes" % revision)
262 repo_types[type].pull(target)
263 resolved_revision = repo_types[type].get_revision_id(target, revision)
264 if not resolved_revision:
265 raise Exception("Failed to resolve revision %s" % revision)
266
267 logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
268 repo_types[type].update(target, resolved_revision)
269
270 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
271 config = read_deps(repodir)
272 if config is None:
273 if level == 0:
274 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
275 return
276 if level >= 10:
277 logging.warning("Too much subrepository nesting, ignoring %s" % repo)
278 return
279
280 if overrideroots is not None:
281 config["_root"] = overrideroots
282
283 for dir, revisions in config.iteritems():
284 if dir.startswith("_") or revisions["_source"] in skipdependencies:
285 continue
286 target = safe_join(repodir, dir)
287 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"])
288 update_repo(target, revisions)
289 resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroots, skipdependencies=skipdependencies)
290
291 if self_update and "_self" in config and "*" in config["_self"]:
292 source = safe_join(repodir, config["_self"]["*"])
212 try: 293 try:
213 with io.open(deps_path, "rt", encoding="utf-8") as handle: 294 with io.open(source, "rb") as handle:
214 for line in handle: 295 sourcedata = handle.read()
215 # Remove comments and whitespace
216 line = re.sub(r"#.*", "", line).strip()
217 if not line:
218 continue
219
220 key, spec = parse_spec(deps_path, line)
221 if spec:
222 result[key] = spec
223 return result
224 except IOError, e: 296 except IOError, e:
225 if e.errno != errno.ENOENT: 297 if e.errno != errno.ENOENT:
226 raise 298 raise
227 return None 299 logging.warning("File %s doesn't exist, skipping self-update" % source)
228 300 return
229 301
230 def safe_join(path, subpath): 302 target = __file__
231 # This has been inspired by Flask's safe_join() function 303 with io.open(target, "rb") as handle:
232 forbidden = {os.sep, os.altsep} - {posixpath.sep, None} 304 targetdata = handle.read()
233 if any(sep in subpath for sep in forbidden): 305
234 raise Exception("Illegal directory separator in dependency path %s" % subpath) 306 if sourcedata != targetdata:
235 307 logging.info("Updating %s from %s, don't forget to commit" % (source, targ et))
236 normpath = posixpath.normpath(subpath) 308 with io.open(target, "wb") as handle:
237 if posixpath.isabs(normpath): 309 handle.write(sourcedata)
238 raise Exception("Dependency path %s cannot be absolute" % subpath) 310 if __name__ == "__main__":
239 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): 311 logging.info("Restarting %s" % target)
240 raise Exception("Dependency path %s has to be inside the repository" % subpath) 312 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
241 return os.path.join(path, *normpath.split(posixpath.sep)) 313 else:
242 314 logging.warning("Cannot restart %s automatically, please rerun" % target )
243
244 def get_repo_type(repo):
245 for name, repotype in repo_types.iteritems():
246 if repotype.istype(repo):
247 return name
248 return "hg"
249
250
251 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
252 if os.path.exists(target):
253 return
254
255 if SKIP_DEPENDENCY_UPDATES:
256 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
257 "%s not cloned", target)
258 return
259
260 postprocess_url = repo_types[type].postprocess_url
261 root = postprocess_url(root)
262 sourcename = postprocess_url(sourcename)
263
264 if os.path.exists(root):
265 url = os.path.join(root, sourcename)
266 else:
267 url = urlparse.urljoin(root, sourcename)
268
269 logging.info("Cloning repository %s into %s" % (url, target))
270 repo_types[type].clone(url, target)
271 repo_types[parenttype].ignore(target, parentrepo)
272
273
274 def update_repo(target, type, revision):
275 resolved_revision = repo_types[type].get_revision_id(target, revision)
276 current_revision = repo_types[type].get_revision_id(target)
277
278 if resolved_revision != current_revision:
279 if SKIP_DEPENDENCY_UPDATES:
280 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
281 "%s not checked out to %s", target, revision)
282 return
283
284 if not resolved_revision:
285 logging.info("Revision %s is unknown, downloading remote changes" % revision)
286 repo_types[type].pull(target)
287 resolved_revision = repo_types[type].get_revision_id(target, revision)
288 if not resolved_revision:
289 raise Exception("Failed to resolve revision %s" % revision)
290
291 logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
292 repo_types[type].update(target, resolved_revision, revision)
293
294
295 def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
296 config = read_deps(repodir)
297 if config is None:
298 if level == 0:
299 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
300 return
301 if level >= 10:
302 logging.warning("Too much subrepository nesting, ignoring %s" % repo)
303 return
304
305 if overrideroots is not None:
306 config["_root"] = overrideroots
307
308 for dir, sources in config.iteritems():
309 if (dir.startswith("_") or
310 skipdependencies.intersection([s[0] for s in sources if s[0]])):
311 continue
312
313 target = safe_join(repodir, dir)
314 parenttype = get_repo_type(repodir)
315 _root = config.get("_root", {})
316
317 for key in sources.keys() + _root.keys():
318 if key == parenttype or key is None and vcs != "*":
319 vcs = key
320 source, rev = merge_seqs(sources.get("*"), sources.get(vcs))
321
322 if not (vcs and source and rev):
323 logging.warning("No valid source / revision found to create %s" % target)
324 continue
325
326 ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
327 update_repo(target, vcs, rev)
328 resolve_deps(target, level + 1, self_update=False,
329 overrideroots=overrideroots, skipdependencies=skipdependencies)
330
331 if self_update and "_self" in config and "*" in config["_self"]:
332 source = safe_join(repodir, config["_self"]["*"])
333 try:
334 with io.open(source, "rb") as handle:
335 sourcedata = handle.read()
336 except IOError, e:
337 if e.errno != errno.ENOENT:
338 raise
339 logging.warning("File %s doesn't exist, skipping self-update" % source)
340 return
341
342 target = __file__
343 with io.open(target, "rb") as handle:
344 targetdata = handle.read()
345
346 if sourcedata != targetdata:
347 logging.info("Updating %s from %s, don't forget to commit" % (target, source))
348 with io.open(target, "wb") as handle:
349 handle.write(sourcedata)
350 if __name__ == "__main__":
351 logging.info("Restarting %s" % target)
352 os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
353 else:
354 logging.warning("Cannot restart %s automatically, please rerun" % target)
355
356 315
357 def _ensure_line_exists(path, pattern): 316 def _ensure_line_exists(path, pattern):
358 with open(path, 'a+') as f: 317 with open(path, 'a+') as f:
359 file_content = [l.strip() for l in f.readlines()] 318 file_content = [l.strip() for l in f.readlines()]
360 if not pattern in file_content: 319 if not pattern in file_content:
361 file_content.append(pattern) 320 file_content.append(pattern)
362 f.seek(0, os.SEEK_SET) 321 f.seek(0, os.SEEK_SET)
363 f.truncate() 322 f.truncate()
364 for l in file_content: 323 for l in file_content:
365 print >>f, l 324 print >>f, l
366 325
367 if __name__ == "__main__": 326 if __name__ == "__main__":
368 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) 327 logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
369 328
370 parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.") 329 parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.")
371 parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path") 330 parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path")
372 parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output") 331 parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output")
373 args = parser.parse_args() 332 args = parser.parse_args()
374 333
375 if args.quiet: 334 if args.quiet:
376 logging.disable(logging.INFO) 335 logging.disable(logging.INFO)
377 336
378 repos = args.repos 337 repos = args.repos
379 if not len(repos): 338 if not len(repos):
380 repos = [os.path.dirname(__file__)] 339 repos = [os.path.dirname(__file__)]
381 for repo in repos: 340 for repo in repos:
382 resolve_deps(repo) 341 resolve_deps(repo)
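
Both versions expose resolve_deps() as the entry point that the __main__ block calls for each repository argument. A minimal sketch, not part of the patch under review, of how the same entry point could be driven from another script; the module name "ensure_dependencies" and the repository path are assumptions for illustration only, and the script itself requires Python 2 (urlparse, ConfigParser, print >>):

# Illustrative sketch only; module name and path are hypothetical.
import os

# SKIP_DEPENDENCY_UPDATES is evaluated once at import time, so set it before
# importing if clones and checkouts should be skipped (any value other than
# "", "0" or "false" enables the skip).
os.environ.setdefault("SKIP_DEPENDENCY_UPDATES", "0")

import ensure_dependencies

# Equivalent to running: python ensure_dependencies.py path/to/repo
ensure_dependencies.resolve_deps("path/to/repo")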