Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29349976: Issue 4346 - Fix exception when creating Safari builds caused by buildtools changes (Closed)
Patch Set: Created Aug. 19, 2016, 12:38 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« no previous file with comments | « dependencies ('k') | sitescripts/extensions/bin/createNightlies.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8
3 2
4 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 6
8 import sys 7 import sys
9 import os 8 import os
10 import posixpath 9 import posixpath
11 import re 10 import re
12 import io 11 import io
13 import errno 12 import errno
14 import logging 13 import logging
15 import subprocess 14 import subprocess
16 import urlparse 15 import urlparse
17 import argparse 16 import argparse
18 17
19 from collections import OrderedDict 18 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 19 from ConfigParser import RawConfigParser
21 20
# Shown when no dependencies file can be found; doubles as the format
# documentation for "dependencies" files.
USAGE = '''
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
'''

# True when the SKIP_DEPENDENCY_UPDATES environment variable is set to
# anything other than "", "0" or "false" (case-insensitive). When set,
# cloning and updating of dependencies is skipped with a warning.
SKIP_DEPENDENCY_UPDATES = os.environ.get(
    'SKIP_DEPENDENCY_UPDATES', ''
).lower() not in ('', '0', 'false')
47 46
class Mercurial():
    """VCS backend implementing the repository operations for Mercurial."""

    def istype(self, repodir):
        # A Mercurial checkout is identified by its .hg metadata directory.
        return os.path.exists(os.path.join(repodir, '.hg'))

    def clone(self, source, target):
        # Normalize the source URL to end with a slash, then clone without
        # touching the working copy; update() selects the revision later.
        if not source.endswith('/'):
            source += '/'
        command = ['hg', 'clone', '--quiet', '--noupdate', source, target]
        subprocess.check_call(command)

    def get_revision_id(self, repo, rev=None):
        cmd = ['hg', 'id', '--repository', repo, '--id']
        if rev:
            cmd += ['--rev', rev]

        # Ignore stderr output and the return code here: if the revision
        # lookup failed we should simply return an empty string.
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        output = process.communicate()[0]
        return output.strip()

    def pull(self, repo):
        subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])

    def update(self, repo, rev, revname):
        # revname is unused for Mercurial; the resolved revision id is enough.
        subprocess.check_call(['hg', 'update', '--repository', repo,
                               '--quiet', '--check', '--rev', rev])

    def ignore(self, target, repo):
        # Nothing to record when the dependency is itself a Mercurial
        # checkout nested inside the parent repository.
        if self.istype(target):
            return

        hgrc_path = os.path.join(repo, '.hg', 'hgrc')
        dependencies_path = os.path.abspath(
            os.path.join(repo, '.hg', 'dependencies'))

        # Point the parent repository's ui.ignore.dependencies setting at a
        # dedicated ignore file that lists all dependency paths.
        parser = RawConfigParser()
        parser.read(hgrc_path)

        if not parser.has_section('ui'):
            parser.add_section('ui')

        parser.set('ui', 'ignore.dependencies', dependencies_path)
        with open(hgrc_path, 'w') as stream:
            parser.write(stream)

        relative_target = os.path.relpath(target, repo)
        _ensure_line_exists(dependencies_path, relative_target)

    def postprocess_url(self, url):
        # Mercurial URLs need no rewriting.
        return url
95 94
96 95
class Git():
    """VCS backend implementing the repository operations for Git."""

    def istype(self, repodir):
        # A Git checkout is identified by its .git entry.
        return os.path.exists(os.path.join(repodir, '.git'))

    def clone(self, source, target):
        # Canonicalize the source: no trailing slash, always a .git suffix.
        source = source.rstrip('/')
        if not source.endswith('.git'):
            source += '.git'
        subprocess.check_call(['git', 'clone', '--quiet', source, target])

    def get_revision_id(self, repo, rev='HEAD'):
        # The ^{commit} suffix dereferences tags so we always get a commit id.
        command = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
        return subprocess.check_output(command, cwd=repo).strip()

    def pull(self, repo):
        # Fetch tracked branches, new tags and the list of available remote
        # branches.
        subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'],
                              cwd=repo)

        # Next we need to ensure all remote branches are tracked.
        new_branches = False
        branches = subprocess.check_output(['git', 'branch', '--remotes'],
                                           cwd=repo)
        for match in re.finditer(r'^\s*(origin/(\S+))$', branches, re.M):
            remote_name, local_name = match.groups()
            with open(os.devnull, 'wb') as devnull:
                status = subprocess.call(
                    ['git', 'branch', '--track', local_name, remote_name],
                    cwd=repo, stdout=devnull, stderr=devnull)
            if status == 0:
                new_branches = True

        # Finally fetch any newly tracked remote branches.
        if new_branches:
            subprocess.check_call(['git', 'fetch', '--quiet', 'origin'],
                                  cwd=repo)

    def update(self, repo, rev, revname):
        # Check out by the symbolic name; the resolved id (rev) is unused.
        subprocess.check_call(['git', 'checkout', '--quiet', revname],
                              cwd=repo)

    def ignore(self, target, repo):
        # Anchor the path at the repository root with a leading separator and
        # record it in .git/info/exclude (like .gitignore, but stays local).
        entry = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
        _ensure_line_exists(exclude_file, entry)

    def postprocess_url(self, url):
        # Handle the alternative (scp-like) syntax of SSH URLs.
        looks_like_ssh = '@' in url and ':' in url
        if looks_like_ssh and not urlparse.urlsplit(url).scheme:
            return 'ssh://' + url.replace(':', '/', 1)
        return url
140 139
# Registry of supported VCS backends; ordered so that type detection in
# get_repo_type() is deterministic (Mercurial is tried first).
repo_types = OrderedDict()
repo_types['hg'] = Mercurial()
repo_types['git'] = Git()

# Matches an "[vcs:]value" item: group 1 is the optional VCS name,
# group 2 the remainder.
item_regexp = re.compile(
    '^(?:(%s):)?(.+)$' % '|'.join(map(re.escape, repo_types.keys()))
)

# Matches an "[url@]rev" source: group 1 is the optional URL,
# group 2 the revision.
source_regexp = re.compile('^(?:(.*)@)?(.+)$')
157 156
158 157
def merge_seqs(seq1, seq2):
    """Return a list merging two sequences, preferring truthy items of seq2.

    Either argument may be None; the shorter sequence is padded with None:

    (None, 2), (1,) => [1, 2]
    None, (1, 2) => [1, 2]
    (1, 2), (3, 4) => [3, 4]
    """
    # The previous implementation relied on Python 2's map() padding the
    # shorter sequence with None; Python 3's map() truncates at the shorter
    # one (and returns an iterator), so do the padding explicitly.
    merged1 = list(seq1 or ())
    merged2 = list(seq2 or ())
    if len(merged1) < len(merged2):
        merged1 += [None] * (len(merged2) - len(merged1))
    elif len(merged2) < len(merged1):
        merged2 += [None] * (len(merged1) - len(merged2))
    return [second or first for first, second in zip(merged1, merged2)]
167 166
168 167
def parse_spec(path, line):
    """Parse one "key = value ..." line from a dependencies file.

    Returns (key, spec): for dependency fields spec maps a VCS name (or '*')
    to a merged [url, rev] pair, for fields starting with '_' it maps the
    VCS name to the raw value. Returns (None, None) for an invalid line and
    (key, None) when no value is given.
    """
    if '=' not in line:
        logging.warning('Invalid line in file %s: %s' % (path, line))
        return None, None

    key, _, value = line.partition('=')
    key = key.strip()
    tokens = value.split()
    if not tokens:
        logging.warning('No value specified for key %s in file %s' % (key, path))
        return key, None

    result = OrderedDict()
    is_dependency_field = not key.startswith('_')

    for index, item in enumerate(tokens):
        try:
            vcs, value = re.search(item_regexp, item).groups()
            vcs = vcs or '*'
            if not is_dependency_field:
                if vcs in result:
                    logging.warning('Ignoring duplicate value for type %r '
                                    '(key %r in file %r)' % (vcs, key, path))
                result[vcs] = value
            elif index == 0 and vcs == '*':
                # In order to be backwards compatible we have to assume that
                # the first source contains only a URL/path for the repo if
                # it does not contain the VCS part.
                result[vcs] = merge_seqs(result.get(vcs), (value, None))
            else:
                url_rev = re.search(source_regexp, value).groups()
                result[vcs] = merge_seqs(result.get(vcs), url_rev)
        except AttributeError:
            logging.warning('Ignoring invalid item %r for type %r '
                            '(key %r in file %r)' % (item, vcs, key, path))
            continue
    return key, result
207 206
208 207
def read_deps(repodir):
    """Read and parse the "dependencies" file of repodir.

    Returns a dict mapping each key to its parsed spec, or None when the
    repository has no dependencies file. Any IOError other than a missing
    file is re-raised.
    """
    result = {}
    deps_path = os.path.join(repodir, 'dependencies')
    try:
        with io.open(deps_path, 'rt', encoding='utf-8') as handle:
            for line in handle:
                # Remove comments and whitespace
                line = re.sub(r'#.*', '', line).strip()
                if not line:
                    continue

                key, spec = parse_spec(deps_path, line)
                if spec:
                    result[key] = spec
        return result
    # "except IOError as e": the old comma form is Python-2-only syntax,
    # the "as" form is valid on Python 2.6+ and Python 3.
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        # A missing dependencies file just means there is nothing to do.
        return None
228 227
229 228
def safe_join(path, subpath):
    """Join subpath onto path, refusing paths that escape the repository.

    This has been inspired by Flask's safe_join() function: native directory
    separators other than "/", absolute paths and parent-directory traversal
    all raise an Exception.
    """
    forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
    if any(sep in subpath for sep in forbidden):
        raise Exception('Illegal directory separator in dependency path %s' % subpath)

    normalized = posixpath.normpath(subpath)
    if posixpath.isabs(normalized):
        raise Exception('Dependency path %s cannot be absolute' % subpath)

    parent_prefix = posixpath.pardir + posixpath.sep
    if normalized == posixpath.pardir or normalized.startswith(parent_prefix):
        raise Exception('Dependency path %s has to be inside the repository' % subpath)

    return os.path.join(path, *normalized.split(posixpath.sep))
242 241
243 242
def get_repo_type(repo):
    """Return the name of the VCS backend managing repo.

    Defaults to 'hg' when repo is not a recognized checkout.
    """
    # .items() instead of the Python-2-only .iteritems() keeps this
    # forward-compatible; repo_types is tiny, so materializing it is free.
    for name, repotype in repo_types.items():
        if repotype.istype(repo):
            return name
    return 'hg'
249 248
250 249
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    """Clone the dependency into target unless it already exists.

    The clone is also registered in the parent repository's ignore list.
    Does nothing when SKIP_DEPENDENCY_UPDATES is set.
    """
    if os.path.exists(target):
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return

    backend = repo_types[type]
    root = backend.postprocess_url(root)
    sourcename = backend.postprocess_url(sourcename)

    # A root that exists locally is a plain directory; anything else is
    # treated as a URL to join the source name onto.
    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    backend.clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)
272 271
273 272
def update_repo(target, type, revision):
    """Check the repository at target out at revision.

    Pulls remote changes first when the revision is not known locally;
    raises if it still cannot be resolved afterwards. Does nothing when
    SKIP_DEPENDENCY_UPDATES is set or the checkout is already current.
    """
    backend = repo_types[type]
    resolved_revision = backend.get_revision_id(target, revision)
    current_revision = backend.get_revision_id(target)

    if resolved_revision == current_revision:
        # Already at the requested revision, nothing to do.
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not checked out to %s', target, revision)
        return

    if not resolved_revision:
        logging.info('Revision %s is unknown, downloading remote changes' % revision)
        backend.pull(target)
        resolved_revision = backend.get_revision_id(target, revision)
        if not resolved_revision:
            raise Exception('Failed to resolve revision %s' % revision)

    logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
    backend.update(target, resolved_revision, revision)
293 292
294 293
def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
    """Ensure every dependency of repodir is cloned and at the right revision.

    Recurses into cloned dependencies up to 10 levels deep. When self_update
    is true and the config names a "_self" source, this script replaces its
    own file from that source and re-executes itself.

    skipdependencies is only read (never mutated), so the mutable default
    set() is safe here.
    """
    config = read_deps(repodir)
    if config is None:
        if level == 0:
            logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
        return
    if level >= 10:
        # NOTE: this message previously referenced the undefined name
        # "repo", raising a NameError instead of warning.
        logging.warning('Too much subrepository nesting, ignoring %s' % repodir)
        return

    if overrideroots is not None:
        config['_root'] = overrideroots

    # .items() / list(...) instead of iteritems() and list-concatenating
    # keys() keeps the loop working on both Python 2 and 3.
    for dir, sources in config.items():
        if (dir.startswith('_') or
                skipdependencies.intersection([s[0] for s in sources if s[0]])):
            continue

        target = safe_join(repodir, dir)
        parenttype = get_repo_type(repodir)
        _root = config.get('_root', {})

        # Pick the VCS type matching the parent repository. vcs must be
        # reset for every dependency: previously a value left over from the
        # prior iteration could leak into this check (and the very first
        # iteration could hit an unbound name). The guard below already
        # expects vcs to possibly be None.
        vcs = None
        for key in list(sources.keys()) + list(_root.keys()):
            if key == parenttype or key is None and vcs != '*':
                vcs = key
        source, rev = merge_seqs(sources.get('*'), sources.get(vcs))

        if not (vcs and source and rev):
            logging.warning('No valid source / revision found to create %s' % target)
            continue

        ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ''), source)
        update_repo(target, vcs, rev)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and '_self' in config and '*' in config['_self']:
        source = safe_join(repodir, config['_self']['*'])
        try:
            with io.open(source, 'rb') as handle:
                sourcedata = handle.read()
        # "as e" instead of the Python-2-only comma syntax.
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, 'rb') as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, 'wb') as handle:
                handle.write(sourcedata)
            if __name__ == '__main__':
                logging.info('Restarting %s' % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning('Cannot restart %s automatically, please rerun' % target)
355 354
356 355
def _ensure_line_exists(path, pattern):
    """Append pattern as its own line to the file at path if not present.

    Creates the file if necessary; when the line is added, the whole file is
    rewritten with every line stripped of surrounding whitespace.
    """
    with open(path, 'a+') as f:
        # 'a+' leaves the initial read position implementation-defined
        # (CPython 2's stdio files read from the start, io-based files from
        # the end), so rewind explicitly before reading.
        f.seek(0, os.SEEK_SET)
        file_content = [l.strip() for l in f.readlines()]
        if pattern not in file_content:
            file_content.append(pattern)
            f.seek(0, os.SEEK_SET)
            f.truncate()
            # f.write() instead of the Python-2-only "print >>f" statement;
            # both emit the line followed by a single newline.
            for l in file_content:
                f.write(l + '\n')
366 365
if __name__ == '__main__':
    # Plain "LEVEL: message" output on stderr.
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        # Silence INFO-level messages; warnings and errors still show.
        logging.disable(logging.INFO)

    repos = args.repos
    if not len(repos):
        # Default to the repository this script lives in.
        repos = [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
OLDNEW
« no previous file with comments | « dependencies ('k') | sitescripts/extensions/bin/createNightlies.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld