Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29372829: Issue 4833 - Enable translations for adblockpluscore (Closed)
Patch Set: Removed `-t generic` parameter, updated ensure_dependencies.py Created Jan. 25, 2017, 3:53 a.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« no previous file with comments | « build.py ('k') | metadata.generic » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8
3 2
4 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 6
8 import sys 7 import sys
9 import os 8 import os
10 import posixpath 9 import posixpath
11 import re 10 import re
12 import io 11 import io
13 import errno 12 import errno
14 import logging 13 import logging
15 import subprocess 14 import subprocess
16 import urlparse 15 import urlparse
17 import argparse 16 import argparse
18 17
19 from collections import OrderedDict 18 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 19 from ConfigParser import RawConfigParser
21 20
# Help text shown when a repository has no "dependencies" file; it documents
# the expected format of that file.
USAGE = '''
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
'''

# True when the user asked to skip cloning/updating dependencies through the
# SKIP_DEPENDENCY_UPDATES environment variable (any value other than '', '0'
# or 'false', compared case-insensitively).
SKIP_DEPENDENCY_UPDATES = os.environ.get(
    'SKIP_DEPENDENCY_UPDATES', ''
).lower() not in ('', '0', 'false')
46 45
47 46
class Mercurial():
    """VCS adapter for Mercurial repositories."""

    def istype(self, repodir):
        # A directory is a Mercurial checkout when it contains .hg.
        return os.path.exists(os.path.join(repodir, '.hg'))

    def clone(self, source, target):
        # Mercurial wants a trailing slash on the source URL.
        if not source.endswith('/'):
            source = source + '/'
        subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, target])

    def get_revision_id(self, repo, rev=None):
        cmd = ['hg', 'id', '--repository', repo, '--id']
        if rev:
            cmd += ['--rev', rev]

        # Ignore stderr output and the return code here: if the revision
        # lookup failed we simply return an empty string.
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output = process.communicate()[0]
        return output.strip()

    def pull(self, repo):
        subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])

    def update(self, repo, rev, revname):
        subprocess.check_call(['hg', 'update', '--repository', repo, '--quiet', '--check', '--rev', rev])

    def ignore(self, target, repo):
        # Register a dedicated ignore file in the parent's hgrc and list the
        # dependency checkout there so it doesn't show up as untracked.
        config_path = os.path.join(repo, '.hg', 'hgrc')
        ignore_file = os.path.join('.hg', 'dependencies')
        ignore_path = os.path.join(repo, ignore_file)

        parser = RawConfigParser()
        parser.read(config_path)

        if not parser.has_section('ui'):
            parser.add_section('ui')

        parser.set('ui', 'ignore.dependencies', ignore_file)
        with open(config_path, 'w') as handle:
            parser.write(handle)

        _ensure_line_exists(ignore_path, os.path.relpath(target, repo))

    def postprocess_url(self, url):
        # Mercurial URLs need no rewriting.
        return url
95 92
96 93
class Git():
    """VCS adapter for Git repositories."""

    def istype(self, repodir):
        # A directory is a Git checkout when it contains a .git entry.
        return os.path.exists(os.path.join(repodir, '.git'))

    def clone(self, source, target):
        # Normalize the source so it ends in exactly one ".git".
        source = source.rstrip('/')
        if not source.endswith('.git'):
            source = source + '.git'
        subprocess.check_call(['git', 'clone', '--quiet', source, target])

    def get_revision_id(self, repo, rev='HEAD'):
        cmd = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
        return subprocess.check_output(cmd, cwd=repo).strip()

    def pull(self, repo):
        # Fetch tracked branches, new tags and the list of available remote branches
        subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'], cwd=repo)
        # Make sure every remote branch has a local tracking branch
        tracked_anything = False
        remotes = subprocess.check_output(['git', 'branch', '--remotes'], cwd=repo)
        for match in re.finditer(r'^\s*(origin/(\S+))$', remotes, re.M):
            remote, local = match.groups()
            with open(os.devnull, 'wb') as devnull:
                status = subprocess.call(['git', 'branch', '--track', local, remote],
                                         cwd=repo, stdout=devnull, stderr=devnull)
            if status == 0:
                tracked_anything = True
        # Finally fetch any newly tracked remote branches
        if tracked_anything:
            subprocess.check_call(['git', 'fetch', '--quiet', 'origin'], cwd=repo)

    def update(self, repo, rev, revname):
        subprocess.check_call(['git', 'checkout', '--quiet', revname], cwd=repo)

    def ignore(self, target, repo):
        entry = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
        _ensure_line_exists(exclude_file, entry)

    def postprocess_url(self, url):
        # Handle alternative syntax of SSH URLS (scp-like user@host:path)
        if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme:
            return 'ssh://' + url.replace(':', '/', 1)
        return url
140 137
# Available VCS backends, keyed by the prefix used in dependency
# specifications. Insertion order matters: get_repo_type() probes them in
# this order when detecting the type of an existing checkout.
repo_types = OrderedDict((
    ('hg', Mercurial()),
    ('git', Git()),
))

# Matches one item of a dependency specification of the form [vcs:]value;
# group 1 is the optional VCS name, group 2 the remaining value.
# [vcs:]value
item_regexp = re.compile(
    '^(?:(' + '|'.join(map(re.escape, repo_types.keys())) + '):)?'
    '(.+)$'
)

# Splits a source into its optional URL part and the mandatory revision.
# [url@]rev
source_regexp = re.compile(
    '^(?:(.*)@)?'
    '(.+)$'
)
157 154
158 155
def merge_seqs(seq1, seq2):
    """Return a list of any truthy values from the supplied sequences

    (None, 2), (1,) => [1, 2]
    None, (1, 2) => [1, 2]
    (1, 2), (3, 4) => [3, 4]

    Values from seq2 win; falsy entries fall back to the corresponding entry
    of seq1. The shorter sequence is padded with None, preserving the
    Python 2 ``map(func, a, b)`` padding semantics the original relied on
    (Python 3's ``map`` would silently truncate to the shorter sequence and
    return an iterator instead of a list).
    """
    seq1 = tuple(seq1 or ())
    seq2 = tuple(seq2 or ())
    # Pad both sequences with None up to the common length.
    length = max(len(seq1), len(seq2))
    seq1 += (None,) * (length - len(seq1))
    seq2 += (None,) * (length - len(seq2))
    return [item2 or item1 for item1, item2 in zip(seq1, seq2)]
167 164
168 165
def parse_spec(path, line):
    """Parse one "key = value ..." line of a dependencies file.

    Returns (key, OrderedDict) where the dict maps a VCS name (or '*') to
    either a merged (url, revision) pair for dependency fields or a plain
    string for meta fields (keys starting with '_'). Returns (None, None)
    for malformed lines and (key, None) for lines without a value.
    """
    if '=' not in line:
        logging.warning('Invalid line in file %s: %s' % (path, line))
        return None, None

    key, value = line.split('=', 1)
    key = key.strip()
    tokens = value.split()
    if not tokens:
        logging.warning('No value specified for key %s in file %s' % (key, path))
        return key, None

    parsed = OrderedDict()
    is_dependency = not key.startswith('_')

    for index, token in enumerate(tokens):
        try:
            vcs, value = re.search(item_regexp, token).groups()
            vcs = vcs or '*'
            if not is_dependency:
                # Meta fields keep the raw value, first occurrence wins.
                if vcs in parsed:
                    logging.warning('Ignoring duplicate value for type %r '
                                    '(key %r in file %r)' % (vcs, key, path))
                parsed[vcs] = value
            elif index == 0 and vcs == '*':
                # In order to be backwards compatible we have to assume that
                # the first source contains only a URL/path for the repo if
                # it does not contain the VCS part
                parsed[vcs] = merge_seqs(parsed.get(vcs), (value, None))
            else:
                url_rev = re.search(source_regexp, value).groups()
                parsed[vcs] = merge_seqs(parsed.get(vcs), url_rev)
        except AttributeError:
            logging.warning('Ignoring invalid item %r for type %r '
                            '(key %r in file %r)' % (token, vcs, key, path))
            continue
    return key, parsed
207 204
208 205
def read_deps(repodir):
    """Read and parse the "dependencies" file of *repodir*.

    Returns a dict mapping dependency keys to their parsed specifications,
    or None when the repository has no dependencies file.
    """
    deps_path = os.path.join(repodir, 'dependencies')
    try:
        result = {}
        with io.open(deps_path, 'rt', encoding='utf-8') as handle:
            for line in handle:
                # Strip comments and surrounding whitespace
                line = re.sub(r'#.*', '', line).strip()
                if not line:
                    continue

                key, spec = parse_spec(deps_path, line)
                if spec:
                    result[key] = spec
        return result
    except IOError as e:
        # A missing file is expected; anything else is a real error.
        if e.errno != errno.ENOENT:
            raise
        return None
228 225
229 226
def safe_join(path, subpath):
    """Join the posix-style relative *subpath* onto *path*, refusing any
    subpath that could escape the repository.

    Inspired by Flask's safe_join() function.
    """
    illegal_separators = {os.sep, os.altsep} - {posixpath.sep, None}
    for separator in illegal_separators:
        if separator in subpath:
            raise Exception('Illegal directory separator in dependency path %s' % subpath)

    normalized = posixpath.normpath(subpath)
    if posixpath.isabs(normalized):
        raise Exception('Dependency path %s cannot be absolute' % subpath)
    escapes_repo = (normalized == posixpath.pardir or
                    normalized.startswith(posixpath.pardir + posixpath.sep))
    if escapes_repo:
        raise Exception('Dependency path %s has to be inside the repository' % subpath)
    return os.path.join(path, *normalized.split(posixpath.sep))
242 239
243 240
def get_repo_type(repo):
    """Return the name of the VCS backend managing *repo*.

    Probes the registered backends in order; Mercurial is the fallback when
    none matches (e.g. for a plain directory).
    """
    matching = (name for name, repotype in repo_types.iteritems()
                if repotype.istype(repo))
    return next(matching, 'hg')
249 246
250 247
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    """Clone the dependency into *target* and register it as ignored in the
    parent repository. A no-op when *target* already exists or dependency
    updates are disabled via SKIP_DEPENDENCY_UPDATES.
    """
    if os.path.exists(target):
        return
    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return

    repo = repo_types[type]
    root = repo.postprocess_url(root)
    sourcename = repo.postprocess_url(sourcename)

    # A root that exists locally is a filesystem path; anything else is
    # treated as a URL.
    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    repo.clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)
272 269
273 270
def update_repo(target, type, revision):
    """Check the repository at *target* out at *revision*, pulling from the
    remote first when the revision is unknown locally.
    """
    repo = repo_types[type]
    resolved_revision = repo.get_revision_id(target, revision)
    current_revision = repo.get_revision_id(target)
    if resolved_revision == current_revision:
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not checked out to %s', target, revision)
        return

    if not resolved_revision:
        # Revision unknown locally -- pull and retry before giving up.
        logging.info('Revision %s is unknown, downloading remote changes' % revision)
        repo.pull(target)
        resolved_revision = repo.get_revision_id(target, revision)
        if not resolved_revision:
            raise Exception('Failed to resolve revision %s' % revision)

    logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
    repo.update(target, resolved_revision, revision)
293 290
294 291
def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
    """Ensure all dependencies of *repodir* are cloned and checked out.

    Reads the repository's "dependencies" file, clones/updates every listed
    subrepository recursively (up to 10 levels deep) and finally performs a
    self-update of this script if requested via the "_self" entry.

    Note: the shared default for *skipdependencies* is safe because the set
    is only ever read, never mutated.
    """
    config = read_deps(repodir)
    if config is None:
        # Only complain at the top level; nested repos may legitimately have
        # no dependencies file.
        if level == 0:
            logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
        return
    if level >= 10:
        # Bug fix: this previously logged the undefined name "repo" (which
        # only resolved through an accidental global leaked by the __main__
        # loop) instead of the repository actually being skipped.
        logging.warning('Too much subrepository nesting, ignoring %s' % repodir)
        return

    if overrideroots is not None:
        config['_root'] = overrideroots

    for dir, sources in config.iteritems():
        if (dir.startswith('_') or
                skipdependencies.intersection([s[0] for s in sources if s[0]])):
            continue

        target = safe_join(repodir, dir)
        parenttype = get_repo_type(repodir)
        _root = config.get('_root', {})

        # Pick the VCS matching the parent repository, if any. Bug fix:
        # initializing vcs explicitly avoids a NameError (or a stale value
        # leaking over from the previous dependency) when no key matches.
        vcs = None
        for key in sources.keys() + _root.keys():
            if key == parenttype or key is None and vcs != '*':
                vcs = key
        # Robustness: merge_seqs yields fewer than two items when neither a
        # generic ('*') nor a VCS-specific source exists; warn instead of
        # raising a ValueError on unpacking.
        merged = merge_seqs(sources.get('*'), sources.get(vcs))
        source, rev = merged if len(merged) == 2 else (None, None)

        if not (vcs and source and rev):
            logging.warning('No valid source / revision found to create %s' % target)
            continue

        ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source)
        update_repo(target, vcs, rev)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and '_self' in config and '*' in config['_self']:
        source = safe_join(repodir, config['_self']['*'])
        try:
            with io.open(source, 'rb') as handle:
                sourcedata = handle.read()
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, 'rb') as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, 'wb') as handle:
                handle.write(sourcedata)
            if __name__ == '__main__':
                # Re-exec the freshly written script so the run continues
                # with the updated code.
                logging.info('Restarting %s' % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning('Cannot restart %s automatically, please rerun' % target)
355 352
356 353
def _ensure_line_exists(path, pattern):
    """Append *pattern* as its own line to the file at *path* unless an
    identical (whitespace-stripped) line is already present.

    Creates the file if it does not exist; rewrites it with normalized lines
    when the pattern has to be added.
    """
    with open(path, 'a+') as f:
        # 'a+' may position the file pointer at EOF; rewind before reading.
        f.seek(0, os.SEEK_SET)
        file_content = [l.strip() for l in f.readlines()]
        if pattern not in file_content:
            file_content.append(pattern)
            f.seek(0, os.SEEK_SET)
            f.truncate()
            # Bug fix: use f.write() instead of the Python-2-only
            # "print >>f, l" statement; the output (line + newline) is
            # identical, but the code now parses under Python 3 as well.
            for l in file_content:
                f.write(l + '\n')
366 364
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    # Default to the repository containing this script. Note: the loop
    # variable is deliberately named "repo" at module level.
    repos = args.repos or [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
OLDNEW
« no previous file with comments | « build.py ('k') | metadata.generic » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld