Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29355252: Issue 4482 - Update buildtools reference in abpcrawler to revision hg:595808987fd9 (Closed)
Patch Set: Created Sept. 29, 2016, 9:29 a.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« no previous file with comments | « dependencies ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8
3 2
4 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 6
8 import sys 7 import sys
9 import os 8 import os
10 import posixpath 9 import posixpath
11 import re 10 import re
12 import io 11 import io
13 import errno 12 import errno
14 import logging 13 import logging
15 import subprocess 14 import subprocess
16 import urlparse 15 import urlparse
17 import argparse 16 import argparse
18 17
19 from collections import OrderedDict 18 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 19 from ConfigParser import RawConfigParser
21 20
# Usage text shown when no dependencies file is found. The example lines
# are indented with two spaces (assumed from upstream formatting — the
# diff rendering does not preserve indentation; TODO confirm).
USAGE = '''
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
'''
36 41
# True when the SKIP_DEPENDENCY_UPDATES environment variable is set to
# anything other than '' / '0' / 'false' (case-insensitive); clone and
# checkout operations are then skipped with a warning.
SKIP_DEPENDENCY_UPDATES = os.environ.get(
    'SKIP_DEPENDENCY_UPDATES', ''
).lower() not in ('', '0', 'false')
40 46
class Mercurial():
    """VCS backend wrapping the ``hg`` command line client."""

    def istype(self, repodir):
        """Return True if repodir looks like a Mercurial checkout."""
        return os.path.exists(os.path.join(repodir, '.hg'))

    def clone(self, source, target):
        """Clone source into target without updating the working copy."""
        if not source.endswith('/'):
            source += '/'
        subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate',
                               source, target])

    def get_revision_id(self, repo, rev=None):
        """Resolve rev (or the working copy parent) to a revision ID.

        Returns an empty string if the revision cannot be resolved.
        """
        command = ['hg', 'id', '--repository', repo, '--id']
        if rev:
            command.extend(['--rev', rev])

        # Ignore stderr output and return code here: if revision lookup
        # failed we should simply return an empty string.
        result = subprocess.Popen(command, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE).communicate()[0]
        return result.strip()

    def pull(self, repo):
        """Fetch new remote changesets into the local repository."""
        subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])

    def update(self, repo, rev, revname):
        """Update the working copy to rev (revname is unused for hg)."""
        subprocess.check_call(['hg', 'update', '--repository', repo,
                               '--quiet', '--check', '--rev', rev])

    def ignore(self, target, repo):
        """Make repo ignore the dependency checked out at target.

        Registers an auxiliary ignore file via ui.ignore.dependencies in
        the repository's hgrc, then adds target to that file. Nested
        Mercurial repositories are ignored automatically, hence the guard.
        """
        if not self.istype(target):
            config_path = os.path.join(repo, '.hg', 'hgrc')
            ignore_path = os.path.abspath(
                os.path.join(repo, '.hg', 'dependencies'))

            config = RawConfigParser()
            config.read(config_path)

            if not config.has_section('ui'):
                config.add_section('ui')

            config.set('ui', 'ignore.dependencies', ignore_path)
            with open(config_path, 'w') as stream:
                config.write(stream)

            module = os.path.relpath(target, repo)
            _ensure_line_exists(ignore_path, module)

    def postprocess_url(self, url):
        # Mercurial URLs need no rewriting.
        return url
94
88 95
class Git():
    """VCS backend wrapping the ``git`` command line client."""

    def istype(self, repodir):
        """Return True if repodir looks like a Git checkout."""
        return os.path.exists(os.path.join(repodir, '.git'))

    def clone(self, source, target):
        """Clone source (normalized to end in .git) into target."""
        source = source.rstrip('/')
        if not source.endswith('.git'):
            source += '.git'
        subprocess.check_call(['git', 'clone', '--quiet', source, target])

    def get_revision_id(self, repo, rev='HEAD'):
        """Resolve rev to a commit ID; empty string if unknown."""
        command = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
        return subprocess.check_output(command, cwd=repo).strip()

    def pull(self, repo):
        """Fetch remote changes and make sure all remote branches are tracked."""
        # Fetch tracked branches, new tags and the list of available remote branches
        subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'],
                              cwd=repo)
        # Next we need to ensure all remote branches are tracked
        newly_tracked = False
        remotes = subprocess.check_output(['git', 'branch', '--remotes'],
                                          cwd=repo)
        for match in re.finditer(r'^\s*(origin/(\S+))$', remotes, re.M):
            remote, local = match.groups()
            with open(os.devnull, 'wb') as devnull:
                if subprocess.call(['git', 'branch', '--track', local, remote],
                                   cwd=repo, stdout=devnull,
                                   stderr=devnull) == 0:
                    newly_tracked = True
        # Finally fetch any newly tracked remote branches
        if newly_tracked:
            subprocess.check_call(['git', 'fetch', '--quiet', 'origin'],
                                  cwd=repo)

    def update(self, repo, rev, revname):
        # Checkout by symbolic name so branches stay attached where possible.
        subprocess.check_call(['git', 'checkout', '--quiet', revname],
                              cwd=repo)

    def ignore(self, target, repo):
        """Add target to repo's .git/info/exclude file."""
        module = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
        _ensure_line_exists(exclude_file, module)

    def postprocess_url(self, url):
        # Handle alternative syntax of SSH URLS
        if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme:
            return 'ssh://' + url.replace(':', '/', 1)
        return url
132 139
# Known VCS backends, in lookup-preference order (hg before git).
repo_types = OrderedDict((
    ('hg', Mercurial()),
    ('git', Git()),
))

# Matches a dependency item of the form [vcs:]value.
item_regexp = re.compile(
    '^(?:(' + '|'.join(map(re.escape, repo_types.keys())) + '):)?'
    '(.+)$'
)

# Matches a source specification of the form [url@]rev.
source_regexp = re.compile(
    '^(?:(.*)@)?'
    '(.+)$'
)
156
157
def merge_seqs(seq1, seq2):
    """Return a list merging the supplied sequences element-wise.

    At each position the value from seq2 wins unless it is falsy, in
    which case the value from seq1 is used.  None stands for an empty
    sequence, and the shorter sequence is padded with None (this makes
    the behavior explicit and portable; Python 2's three-argument map()
    padded implicitly, Python 3's map() would truncate instead).

    (None, 2), (1,) => [1, 2]
    None, (1, 2) => [1, 2]
    (1, 2), (3, 4) => [3, 4]
    """
    first = list(seq1 or ())
    second = list(seq2 or ())
    # Pad the shorter sequence so both have the same length.
    if len(first) < len(second):
        first += [None] * (len(second) - len(first))
    else:
        second += [None] * (len(first) - len(second))
    return [b or a for a, b in zip(first, second)]
166
167
def parse_spec(path, line):
    """Parse one 'key = value...' line from a dependencies file.

    Returns (key, spec) where spec is an OrderedDict mapping a VCS name
    (or '*') to either a raw value (for '_'-prefixed meta keys) or a
    merged (url, rev) pair (for dependency keys).  Returns (None, None)
    for lines without '=', and (key, None) for keys without a value.
    Invalid or duplicate items are skipped with a warning.
    """
    if '=' not in line:
        logging.warning('Invalid line in file %s: %s' % (path, line))
        return None, None

    key, value = line.split('=', 1)
    key = key.strip()
    items = value.split()
    if not len(items):
        logging.warning('No value specified for key %s in file %s' % (key, path))
        return key, None

    result = OrderedDict()
    is_dependency_field = not key.startswith('_')

    for i, item in enumerate(items):
        try:
            vcs, value = re.search(item_regexp, item).groups()
            vcs = vcs or '*'
            if is_dependency_field:
                if i == 0 and vcs == '*':
                    # In order to be backwards compatible we have to
                    # assume that the first source contains only a
                    # URL/path for the repo if it does not contain the
                    # VCS part
                    url_rev = (value, None)
                else:
                    url_rev = re.search(source_regexp, value).groups()
                result[vcs] = merge_seqs(result.get(vcs), url_rev)
            else:
                if vcs in result:
                    logging.warning('Ignoring duplicate value for type %r '
                                    '(key %r in file %r)' % (vcs, key, path))
                result[vcs] = value
        except AttributeError:
            # re.search() returned None, i.e. the item did not match.
            logging.warning('Ignoring invalid item %r for type %r '
                            '(key %r in file %r)' % (item, vcs, key, path))
            continue
    return key, result
206
207
def read_deps(repodir):
    """Read and parse the 'dependencies' file inside repodir.

    Returns a dict mapping each key to its parsed spec, or None if the
    file does not exist.  Any other I/O error is re-raised.
    """
    result = {}
    deps_path = os.path.join(repodir, 'dependencies')
    try:
        with io.open(deps_path, 'rt', encoding='utf-8') as handle:
            for line in handle:
                # Remove comments and whitespace
                line = re.sub(r'#.*', '', line).strip()
                if not line:
                    continue

                key, spec = parse_spec(deps_path, line)
                if spec:
                    result[key] = spec
        return result
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return None
227
228
def safe_join(path, subpath):
    """Join subpath onto path, refusing paths escaping the repository.

    This has been inspired by Flask's safe_join() function.  Raises an
    Exception for OS-specific separators, absolute paths, and paths that
    normalize to outside the repository ('..').
    """
    forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
    if any(sep in subpath for sep in forbidden):
        raise Exception('Illegal directory separator in dependency path %s' % subpath)

    normpath = posixpath.normpath(subpath)
    if posixpath.isabs(normpath):
        raise Exception('Dependency path %s cannot be absolute' % subpath)
    if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
        raise Exception('Dependency path %s has to be inside the repository' % subpath)
    return os.path.join(path, *normpath.split(posixpath.sep))
241
242
def get_repo_type(repo):
    """Return the name of the VCS managing repo, defaulting to 'hg'."""
    for name, repotype in repo_types.iteritems():
        if repotype.istype(repo):
            return name
    return 'hg'
248
249
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    """Clone the dependency into target if it does not exist yet.

    parentrepo/parenttype identify the enclosing repository (so the new
    checkout can be added to its ignore list); type selects the VCS
    backend, and root + sourcename determine the clone URL.  Does
    nothing when target exists or SKIP_DEPENDENCY_UPDATES is set.
    """
    if os.path.exists(target):
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return

    postprocess_url = repo_types[type].postprocess_url
    root = postprocess_url(root)
    sourcename = postprocess_url(sourcename)

    # A root that exists locally is a filesystem path; otherwise treat
    # it as a base URL.
    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    repo_types[type].clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)
164 272
def update_repo(target, type, revision):
    """Check out revision in the repository at target if needed.

    Pulls remote changes when the revision is not known locally, and
    raises if it still cannot be resolved afterwards.  Does nothing when
    the working copy is already at the requested revision, or when
    SKIP_DEPENDENCY_UPDATES is set.
    """
    resolved_revision = repo_types[type].get_revision_id(target, revision)
    current_revision = repo_types[type].get_revision_id(target)

    if resolved_revision != current_revision:
        if SKIP_DEPENDENCY_UPDATES:
            logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                            '%s not checked out to %s', target, revision)
            return

        if not resolved_revision:
            logging.info('Revision %s is unknown, downloading remote changes' % revision)
            repo_types[type].pull(target)
            resolved_revision = repo_types[type].get_revision_id(target, revision)
            if not resolved_revision:
                raise Exception('Failed to resolve revision %s' % revision)

        logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
        repo_types[type].update(target, resolved_revision, revision)
def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
    """Ensure all dependencies of repodir are cloned and up to date.

    Recurses into each dependency (up to 10 levels) and, when
    self_update is true and the dependencies file declares a '_self'
    entry, updates this script from the referenced copy and restarts.
    """
    config = read_deps(repodir)
    if config is None:
        if level == 0:
            logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
        return
    if level >= 10:
        # Fixed: the original referenced an undefined name 'repo' here.
        logging.warning('Too much subrepository nesting, ignoring %s' % repodir)
        return

    if overrideroots is not None:
        config['_root'] = overrideroots

    for dir, sources in config.iteritems():
        if (dir.startswith('_') or
                skipdependencies.intersection([s[0] for s in sources if s[0]])):
            continue

        target = safe_join(repodir, dir)
        parenttype = get_repo_type(repodir)
        _root = config.get('_root', {})

        # Pick the VCS matching the parent repository if possible.
        # Fixed: initialize vcs so a spec with no matching key warns
        # below instead of raising NameError.
        vcs = None
        for key in sources.keys() + _root.keys():
            if key == parenttype or key is None and vcs != '*':
                vcs = key
        source, rev = merge_seqs(sources.get('*'), sources.get(vcs))

        if not (vcs and source and rev):
            logging.warning('No valid source / revision found to create %s' % target)
            continue

        ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source)
        update_repo(target, vcs, rev)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and '_self' in config and '*' in config['_self']:
        source = safe_join(repodir, config['_self']['*'])
        try:
            with io.open(source, 'rb') as handle:
                sourcedata = handle.read()
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, 'rb') as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, 'wb') as handle:
                handle.write(sourcedata)
            if __name__ == '__main__':
                logging.info('Restarting %s' % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning('Cannot restart %s automatically, please rerun' % target)
315 355
316 def _ensure_line_exists(path, pattern): 356 def _ensure_line_exists(path, pattern):
317 with open(path, 'a+') as f: 357 with open(path, 'a+') as f:
318 file_content = [l.strip() for l in f.readlines()] 358 file_content = [l.strip() for l in f.readlines()]
319 if not pattern in file_content: 359 if not pattern in file_content:
320 file_content.append(pattern) 360 file_content.append(pattern)
321 f.seek(0, os.SEEK_SET) 361 f.seek(0, os.SEEK_SET)
322 f.truncate() 362 f.truncate()
323 for l in file_content: 363 for l in file_content:
324 print >>f, l 364 print >>f, l
325 365
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    # Default to the repository containing this script.
    repos = args.repos
    if not len(repos):
        repos = [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
OLDNEW
« no previous file with comments | « dependencies ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld