Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29345279: Noissue - Adapt quotes for compliance with our coding style in buildtools (Closed)
Patch Set: Created May 29, 2016, 1:27 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« no previous file with comments | « chainedconfigparser.py ('k') | localeTools.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 2
3 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
4 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
5 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
6 6
7 import sys 7 import sys
8 import os 8 import os
9 import posixpath 9 import posixpath
10 import re 10 import re
11 import io 11 import io
12 import errno 12 import errno
13 import logging 13 import logging
14 import subprocess 14 import subprocess
15 import urlparse 15 import urlparse
16 import argparse 16 import argparse
17 17
18 from collections import OrderedDict 18 from collections import OrderedDict
19 from ConfigParser import RawConfigParser 19 from ConfigParser import RawConfigParser
20 20
# Shown when no dependencies file can be found; documents the expected
# format of a "dependencies" file.
USAGE = '''
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
'''

# Honour the SKIP_DEPENDENCY_UPDATES environment variable: any value other
# than (case-insensitively) '', '0' or 'false' disables cloning and updating.
SKIP_DEPENDENCY_UPDATES = (
    os.environ.get('SKIP_DEPENDENCY_UPDATES', '').lower()
    not in ('', '0', 'false')
)
45 45
46 46
class Mercurial():
    """Operations on Mercurial repositories, driven via the hg CLI."""

    def istype(self, repodir):
        # A Mercurial checkout is identified by its .hg metadata directory.
        return os.path.exists(os.path.join(repodir, '.hg'))

    def clone(self, source, target):
        if not source.endswith('/'):
            source += '/'
        subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, target])

    def get_revision_id(self, repo, rev=None):
        args = ['hg', 'id', '--repository', repo, '--id']
        if rev:
            args.extend(['--rev', rev])

        # Ignore stderr output and return code here: if revision lookup
        # failed we should simply return an empty string.
        process = subprocess.Popen(args, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        output = process.communicate()[0]
        return output.strip()

    def pull(self, repo):
        subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])

    def update(self, repo, rev, revname):
        # revname is unused here; Mercurial checks out the resolved ID directly.
        subprocess.check_call(['hg', 'update', '--repository', repo, '--quiet',
                               '--check', '--rev', rev])

    def ignore(self, target, repo):
        # Only add an ignore entry when the dependency is not itself a
        # Mercurial checkout.
        if self.istype(target):
            return

        config_path = os.path.join(repo, '.hg', 'hgrc')
        ignore_path = os.path.abspath(os.path.join(repo, '.hg', 'dependencies'))

        # Point the parent repository's ui.ignore.dependencies setting at a
        # dedicated ignore file, then list the dependency in that file.
        config = RawConfigParser()
        config.read(config_path)

        if not config.has_section('ui'):
            config.add_section('ui')

        config.set('ui', 'ignore.dependencies', ignore_path)
        with open(config_path, 'w') as stream:
            config.write(stream)

        _ensure_line_exists(ignore_path, os.path.relpath(target, repo))

    def postprocess_url(self, url):
        # Mercurial accepts its URLs as-is.
        return url
94 94
95 95
class Git():
    """Operations on Git repositories, driven via the git CLI."""

    def istype(self, repodir):
        # A Git checkout is identified by its .git metadata entry.
        return os.path.exists(os.path.join(repodir, '.git'))

    def clone(self, source, target):
        source = source.rstrip('/')
        if not source.endswith('.git'):
            source += '.git'
        subprocess.check_call(['git', 'clone', '--quiet', source, target])

    def get_revision_id(self, repo, rev='HEAD'):
        args = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
        return subprocess.check_output(args, cwd=repo).strip()

    def pull(self, repo):
        # Fetch tracked branches, new tags and the list of available remote branches
        subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'], cwd=repo)
        # Next we need to ensure all remote branches are tracked
        newly_tracked = False
        remotes = subprocess.check_output(['git', 'branch', '--remotes'], cwd=repo)
        for match in re.finditer(r'^\s*(origin/(\S+))$', remotes, re.M):
            remote_branch, local_branch = match.groups()
            with open(os.devnull, 'wb') as devnull:
                status = subprocess.call(
                    ['git', 'branch', '--track', local_branch, remote_branch],
                    cwd=repo, stdout=devnull, stderr=devnull)
            if status == 0:
                newly_tracked = True
        # Finally fetch any newly tracked remote branches
        if newly_tracked:
            subprocess.check_call(['git', 'fetch', '--quiet', 'origin'], cwd=repo)

    def update(self, repo, rev, revname):
        subprocess.check_call(['git', 'checkout', '--quiet', revname], cwd=repo)

    def ignore(self, target, repo):
        # .git/info/exclude acts like an uncommitted .gitignore.
        entry = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
        _ensure_line_exists(exclude_file, entry)

    def postprocess_url(self, url):
        # Handle alternative syntax of SSH URLS: rewrite the scp-like
        # "user@host:path" shorthand into a proper ssh:// URL.
        has_scheme = bool(urlparse.urlsplit(url).scheme)
        if '@' in url and ':' in url and not has_scheme:
            return 'ssh://' + url.replace(':', '/', 1)
        return url
139 139
# Registry of supported version control systems; iteration order matters
# for repository type detection (Mercurial is checked first).
repo_types = OrderedDict((
    ('hg', Mercurial()),
    ('git', Git()),
))

# [vcs:]value
item_regexp = re.compile(
    '^(?:(%s):)?(.+)$' % '|'.join(map(re.escape, repo_types.keys()))
)

# [url@]rev
source_regexp = re.compile('^(?:(.*)@)?(.+)$')
156 156
157 157
def merge_seqs(seq1, seq2):
    """Return a list of any truthy values from the supplied sequences

    (None, 2), (1,) => [1, 2]
    None, (1, 2) => [1, 2]
    (1, 2), (3, 4) => [3, 4]
    """
    # The previous implementation relied on Python 2's map() padding the
    # shorter sequence with None; pad explicitly so the behavior is the same
    # on interpreters where map() truncates at the shorter input instead.
    first = list(seq1 or ())
    second = list(seq2 or ())
    length = max(len(first), len(second))
    first.extend([None] * (length - len(first)))
    second.extend([None] * (length - len(second)))
    return [b or a for a, b in zip(first, second)]
166 166
167 167
def parse_spec(path, line):
    """Parse one "key = value [value ...]" line of a dependencies file.

    Returns (key, spec) where spec is an OrderedDict keyed by VCS name
    ('*' when no VCS prefix was given). For dependency keys the values are
    merged (url, revision) pairs; for "_"-prefixed keys they are plain
    strings. Returns (None, None) for unparsable lines and (key, None)
    when the key has no values.
    """
    if '=' not in line:
        logging.warning('Invalid line in file %s: %s' % (path, line))
        return None, None

    key, value = line.split('=', 1)
    key = key.strip()
    items = value.split()
    if not items:
        logging.warning('No value specified for key %s in file %s' % (key, path))
        return key, None

    result = OrderedDict()
    is_dependency_field = not key.startswith('_')

    for index, item in enumerate(items):
        try:
            vcs, value = re.search(item_regexp, item).groups()
            vcs = vcs or '*'
            if not is_dependency_field:
                if vcs in result:
                    logging.warning('Ignoring duplicate value for type %r '
                                    '(key %r in file %r)' % (vcs, key, path))
                result[vcs] = value
            elif index == 0 and vcs == '*':
                # In order to be backwards compatible we have to assume that
                # the first source contains only a URL/path for the repo if
                # it does not contain the VCS part.
                result[vcs] = merge_seqs(result.get(vcs), (value, None))
            else:
                url_rev = re.search(source_regexp, value).groups()
                result[vcs] = merge_seqs(result.get(vcs), url_rev)
        except AttributeError:
            # re.search() returned None: the item didn't match the pattern.
            logging.warning('Ignoring invalid item %r for type %r '
                            '(key %r in file %r)' % (item, vcs, key, path))
            continue
    return key, result
206 206
207 207
def read_deps(repodir):
    """Read and parse the "dependencies" file of a repository.

    Returns a dict mapping each entry's key to its parsed spec, or None if
    the repository has no dependencies file. Any IOError other than a
    missing file is re-raised.
    """
    result = {}
    deps_path = os.path.join(repodir, 'dependencies')
    try:
        with io.open(deps_path, 'rt', encoding='utf-8') as handle:
            for line in handle:
                # Remove comments and whitespace
                line = re.sub(r'#.*', '', line).strip()
                if not line:
                    continue

                key, spec = parse_spec(deps_path, line)
                if spec:
                    result[key] = spec
            return result
    # Fixed: use the "except ... as" form instead of the Python-2-only
    # comma syntax, which is a SyntaxError on Python 3.
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return None
227 227
228 228
def safe_join(path, subpath):
    """Join *subpath* below *path*, rejecting paths that could escape it."""
    # This has been inspired by Flask's safe_join() function: refuse native
    # separators other than the POSIX one, absolute paths and parent-
    # directory traversal.
    illegal_seps = {os.sep, os.altsep} - {posixpath.sep, None}
    for sep in illegal_seps:
        if sep in subpath:
            raise Exception('Illegal directory separator in dependency path %s' % subpath)

    normalized = posixpath.normpath(subpath)
    if posixpath.isabs(normalized):
        raise Exception('Dependency path %s cannot be absolute' % subpath)
    escapes_repo = (normalized == posixpath.pardir or
                    normalized.startswith(posixpath.pardir + posixpath.sep))
    if escapes_repo:
        raise Exception('Dependency path %s has to be inside the repository' % subpath)
    return os.path.join(path, *normalized.split(posixpath.sep))
241 241
242 242
def get_repo_type(repo):
    """Return the name of the VCS managing *repo*, defaulting to 'hg'."""
    matches = (name for name, handler in repo_types.iteritems()
               if handler.istype(repo))
    return next(matches, 'hg')
248 248
249 249
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    """Clone the repository for a dependency unless it already exists,
    and make the parent repository ignore the checkout."""
    if os.path.exists(target):
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return

    # Let the target VCS rewrite the URLs (e.g. Git's scp-like SSH syntax).
    vcs = repo_types[type]
    root = vcs.postprocess_url(root)
    sourcename = vcs.postprocess_url(sourcename)

    # A root that exists locally is a filesystem path; anything else is
    # treated as a base URL to resolve the source name against.
    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    vcs.clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)
271 271
272 272
def update_repo(target, type, revision):
    """Check out *revision* in the repository at *target* if it isn't the
    current revision already, pulling remote changes when needed."""
    vcs = repo_types[type]
    resolved_revision = vcs.get_revision_id(target, revision)
    current_revision = vcs.get_revision_id(target)

    if resolved_revision == current_revision:
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not checked out to %s', target, revision)
        return

    if not resolved_revision:
        # The revision isn't known locally: fetch and resolve again.
        logging.info('Revision %s is unknown, downloading remote changes' % revision)
        vcs.pull(target)
        resolved_revision = vcs.get_revision_id(target, revision)
        if not resolved_revision:
            raise Exception('Failed to resolve revision %s' % revision)

    logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
    vcs.update(target, resolved_revision, revision)
292 292
293 293
def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
    """Ensure all dependencies of the repository at *repodir* are present
    and at the right revision, recursing into each dependency.

    When self_update is true and the dependencies file has a "_self" entry,
    this script overwrites itself with the copy from the dependency and
    (when run as a script) restarts.
    """
    config = read_deps(repodir)
    if config is None:
        if level == 0:
            logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
        return
    if level >= 10:
        # Fixed: this previously referenced the undefined name "repo"
        # (raising NameError when called as a library) instead of repodir.
        logging.warning('Too much subrepository nesting, ignoring %s' % repodir)
        return

    if overrideroots is not None:
        config['_root'] = overrideroots

    for dir, sources in config.iteritems():
        if (dir.startswith('_') or
            skipdependencies.intersection([s[0] for s in sources if s[0]])):
            continue

        target = safe_join(repodir, dir)
        parenttype = get_repo_type(repodir)
        _root = config.get('_root', {})

        # NOTE(review): "vcs" may be unbound on the first iteration if no
        # key equals parenttype, and it carries over between iterations of
        # the outer loop. Preserved as-is to avoid a behavior change —
        # confirm intent upstream.
        for key in sources.keys() + _root.keys():
            if key == parenttype or key is None and vcs != '*':
                vcs = key
        source, rev = merge_seqs(sources.get('*'), sources.get(vcs))

        if not (vcs and source and rev):
            logging.warning('No valid source / revision found to create %s' % target)
            continue

        ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source)
        update_repo(target, vcs, rev)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and '_self' in config and '*' in config['_self']:
        source = safe_join(repodir, config['_self']['*'])
        try:
            with io.open(source, 'rb') as handle:
                sourcedata = handle.read()
        # Fixed: "except ... as" instead of the Python-2-only comma syntax.
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, 'rb') as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, 'wb') as handle:
                handle.write(sourcedata)
            if __name__ == '__main__':
                logging.info('Restarting %s' % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning('Cannot restart %s automatically, please rerun' % target)
354 354
355 355
356 def _ensure_line_exists(path, pattern): 356 def _ensure_line_exists(path, pattern):
357 with open(path, 'a+') as f: 357 with open(path, 'a+') as f:
358 file_content = [l.strip() for l in f.readlines()] 358 file_content = [l.strip() for l in f.readlines()]
359 if not pattern in file_content: 359 if not pattern in file_content:
360 file_content.append(pattern) 360 file_content.append(pattern)
361 f.seek(0, os.SEEK_SET) 361 f.seek(0, os.SEEK_SET)
362 f.truncate() 362 f.truncate()
363 for l in file_content: 363 for l in file_content:
364 print >>f, l 364 print >>f, l
365 365
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    # Command line: zero or more repository paths, defaulting to the
    # directory containing this script.
    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    repos = args.repos or [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
OLDNEW
« no previous file with comments | « chainedconfigparser.py ('k') | localeTools.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld