Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Side by Side Diff: ensure_dependencies.py

Issue 29802595: Issue 6664 - Update the buildtools dependency for adblockpluscore
Patch Set: Created June 8, 2018, 4:56 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff | Download patch
« dependencies ('K') | « dependencies ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8
3 2
4 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 6
8 import sys 7 import sys
9 import os 8 import os
10 import posixpath 9 import posixpath
11 import re 10 import re
12 import io 11 import io
13 import errno 12 import errno
14 import logging 13 import logging
15 import subprocess 14 import subprocess
16 import urlparse 15 import urlparse
17 import argparse 16 import argparse
17 import json
18 18
19 from collections import OrderedDict 19 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 20 from ConfigParser import RawConfigParser
21 21
# Usage text shown when no dependencies file can be found.
USAGE = '''
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
'''

# Any value other than "", "0" or "false" (case-insensitive) in this
# environment variable disables cloning and updating of dependencies.
SKIP_DEPENDENCY_UPDATES = os.environ.get(
    'SKIP_DEPENDENCY_UPDATES', ''
).lower() not in ('', '0', 'false')

# Marker file created before running npm and removed only on success, so a
# failed install is retried on the next run.
NPM_LOCKFILE = '.npm_install_lock'
46 48
47 49
class Mercurial:
    """Backend for dependency checkouts managed with Mercurial."""

    def istype(self, repodir):
        # A Mercurial working copy is identified by its .hg directory.
        return os.path.exists(os.path.join(repodir, '.hg'))

    def clone(self, source, target):
        if not source.endswith('/'):
            source += '/'
        subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate',
                               source, target])

    def get_revision_id(self, repo, rev=None):
        command = ['hg', 'id', '--repository', repo, '--id']
        if rev:
            command.extend(['--rev', rev])

        # Ignore stderr output and return code here: if revision lookup
        # failed we should simply return an empty string.
        process = subprocess.Popen(command, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        output = process.communicate()[0]
        return output.strip()

    def pull(self, repo):
        subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])

    def update(self, repo, rev, revname):
        # revname is only needed by the Git backend; Mercurial checks out
        # the resolved revision ID directly.
        subprocess.check_call(['hg', 'update', '--repository', repo,
                               '--quiet', '--check', '--rev', rev])

    def ignore(self, target, repo):
        config_path = os.path.join(repo, '.hg', 'hgrc')
        ignore_file = os.path.join('.hg', 'dependencies')
        ignore_path = os.path.join(repo, ignore_file)

        # Point the ui.ignore.dependencies option at a dedicated ignore
        # file inside .hg (path relative to the repository root), then
        # make sure the target is listed in that file.
        config = RawConfigParser()
        config.read(config_path)

        if not config.has_section('ui'):
            config.add_section('ui')

        config.set('ui', 'ignore.dependencies', ignore_file)
        with open(config_path, 'w') as stream:
            config.write(stream)

        _ensure_line_exists(ignore_path, os.path.relpath(target, repo))

    def postprocess_url(self, url):
        # Mercurial URLs need no rewriting.
        return url
95 95
96 96
class Git:
    """Backend for dependency checkouts managed with Git."""

    def istype(self, repodir):
        # A Git working copy is identified by its .git entry.
        return os.path.exists(os.path.join(repodir, '.git'))

    def clone(self, source, target):
        source = source.rstrip('/')
        if not source.endswith('.git'):
            source += '.git'
        subprocess.check_call(['git', 'clone', '--quiet', source, target])

    def get_revision_id(self, repo, rev='HEAD'):
        command = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
        return subprocess.check_output(command, cwd=repo).strip()

    def pull(self, repo):
        # Fetch tracked branches, new tags and the list of available
        # remote branches.
        subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'],
                              cwd=repo)
        # Next we need to ensure all remote branches are tracked.
        newly_tracked = False
        remotes = subprocess.check_output(['git', 'branch', '--remotes'],
                                          cwd=repo)
        for match in re.finditer(r'^\s*(origin/(\S+))$', remotes, re.M):
            remote, local = match.groups()
            with open(os.devnull, 'wb') as devnull:
                if subprocess.call(['git', 'branch', '--track', local, remote],
                                   cwd=repo, stdout=devnull,
                                   stderr=devnull) == 0:
                    newly_tracked = True
        # Finally fetch any newly tracked remote branches.
        if newly_tracked:
            subprocess.check_call(['git', 'fetch', '--quiet', 'origin'],
                                  cwd=repo)

    def update(self, repo, rev, revname):
        subprocess.check_call(['git', 'checkout', '--quiet', revname],
                              cwd=repo)

    def ignore(self, target, repo):
        module = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
        _ensure_line_exists(exclude_file, module)

    def postprocess_url(self, url):
        # Handle the alternative scp-like syntax of SSH URLs
        # (user@host:path -> ssh://user@host/path).
        if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme:
            return 'ssh://' + url.replace(':', '/', 1)
        return url
140 140
141
# Registry of supported backends; detection order matters (Mercurial is
# tried before Git).
repo_types = OrderedDict([
    ('hg', Mercurial()),
    ('git', Git()),
])

# Parses an optional VCS prefix from a value: [vcs:]value
item_regexp = re.compile(
    '^(?:(%s):)?(.+)$' % '|'.join(re.escape(name) for name in repo_types)
)

# Splits an optional source URL from a revision: [url@]rev
source_regexp = re.compile('^(?:(.*)@)?(.+)$')
157 158
158 159
def merge_seqs(seq1, seq2):
    """Return a list of any truthy values from the supplied sequences

    (None, 2), (1,) => [1, 2]
    None, (1, 2) => [1, 2]
    (1, 2), (3, 4) => [3, 4]

    Elements of seq2 take precedence; a falsy (or missing) element falls
    back to the corresponding element of seq1. Either argument may be None.
    """
    first = list(seq1 or ())
    second = list(seq2 or ())
    # Pad the shorter sequence with None. Python 2's map() did this
    # implicitly when given multiple iterables, but Python 3's map()
    # truncates instead (and returns a lazy iterator) - being explicit
    # keeps the behavior correct and portable.
    size = max(len(first), len(second))
    first += [None] * (size - len(first))
    second += [None] * (size - len(second))
    return [b or a for a, b in zip(first, second)]
167 168
168 169
def parse_spec(path, line):
    """Parse one "key = value ..." line from a dependencies file.

    Returns (key, spec) where spec maps a VCS name (or '*') to a
    (url, revision) pair for dependency keys, or to a plain value for
    metadata keys (those starting with '_'). Malformed lines yield
    (None, None) or (key, None).
    """
    if '=' not in line:
        logging.warning('Invalid line in file %s: %s' % (path, line))
        return None, None

    key, value = line.split('=', 1)
    key = key.strip()
    tokens = value.split()
    if not tokens:
        logging.warning('No value specified for key %s in file %s' % (key, path))
        return key, None

    spec = OrderedDict()
    is_dependency = not key.startswith('_')

    for index, token in enumerate(tokens):
        try:
            vcs, token_value = re.search(item_regexp, token).groups()
            vcs = vcs or '*'
            if is_dependency:
                if index == 0 and vcs == '*':
                    # In order to be backwards compatible we have to assume
                    # that the first source contains only a URL/path for the
                    # repo if it does not contain the VCS part
                    url_rev = (token_value, None)
                else:
                    url_rev = re.search(source_regexp, token_value).groups()
                spec[vcs] = merge_seqs(spec.get(vcs), url_rev)
            else:
                if vcs in spec:
                    logging.warning('Ignoring duplicate value for type %r '
                                    '(key %r in file %r)' % (vcs, key, path))
                spec[vcs] = token_value
        except AttributeError:
            logging.warning('Ignoring invalid item %r for type %r '
                            '(key %r in file %r)' % (token, vcs, key, path))
            continue
    return key, spec
207 208
208 209
def read_deps(repodir):
    """Read and parse the "dependencies" file inside *repodir*.

    Returns a dict mapping keys to parsed specs, or None when no
    dependencies file exists.
    """
    deps_path = os.path.join(repodir, 'dependencies')
    try:
        result = {}
        with io.open(deps_path, 'rt', encoding='utf-8') as handle:
            for raw_line in handle:
                # Remove comments and whitespace
                stripped = re.sub(r'#.*', '', raw_line).strip()
                if not stripped:
                    continue

                key, spec = parse_spec(deps_path, stripped)
                if spec:
                    result[key] = spec
        return result
    except IOError as e:
        # A missing dependencies file simply means there is nothing to do.
        if e.errno != errno.ENOENT:
            raise
        return None
228 229
229 230
def safe_join(path, subpath):
    """Join *subpath* (a POSIX-style relative path) onto *path*, refusing
    any subpath that could escape the repository.

    This has been inspired by Flask's safe_join() function.
    """
    # Reject native separators other than '/' (e.g. '\\' on Windows).
    forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
    if any(sep in subpath for sep in forbidden):
        raise Exception('Illegal directory separator in dependency path %s' % subpath)

    normpath = posixpath.normpath(subpath)
    if posixpath.isabs(normpath):
        raise Exception('Dependency path %s cannot be absolute' % subpath)
    escapes_repo = (normpath == posixpath.pardir or
                    normpath.startswith(posixpath.pardir + posixpath.sep))
    if escapes_repo:
        raise Exception('Dependency path %s has to be inside the repository' % subpath)
    return os.path.join(path, *normpath.split(posixpath.sep))
242 243
243 244
def get_repo_type(repo):
    """Return the name of the VCS managing *repo*, defaulting to 'hg'."""
    for name in repo_types:
        if repo_types[name].istype(repo):
            return name
    return 'hg'
250
251
def resolve_npm_dependencies(target, vcs):
    """Install Node.js production-only dependencies if necessary and desired.

    When the target dependency has additional Node.js dependencies declared
    run "npm install --only=production --loglevel=warn" to resolve the
    declared dependencies.

    Additionally, make sure that any VCS will ignore the installed files.

    Requires Node.js to be installed locally.
    """
    try:
        with open(os.path.join(target, 'package.json'), 'r') as fp:
            package_data = json.load(fp)

        # In case a package.json does not exist at all or if there are no
        # production dependencies declared, we don't need to run npm and can
        # bail out early.
        if not package_data.get('dependencies', False):
            return
    except IOError:
        return

    try:
        # Create an empty file, which gets deleted after successfully
        # installing Node.js dependencies. If it is still present on the
        # next run, the previous install failed and is retried.
        lockfile_path = os.path.join(target, NPM_LOCKFILE)
        open(lockfile_path, 'a').close()

        if os.name == 'nt':
            # Windows' CreateProcess() (called by subprocess.Popen()) only
            # resolves executables ending in .exe. The Windows installation
            # of Node.js only provides a npm.cmd, which is executable but
            # won't be recognized as such by CreateProcess().
            npm_exec = 'npm.cmd'
        else:
            npm_exec = 'npm'

        cmd = [npm_exec, 'install', '--only=production', '--loglevel=warn',
               '--no-package-lock', '--no-optional']
        subprocess.check_output(cmd, cwd=target)

        # Keep both the lockfile and the installed modules out of the VCS.
        repo_types[vcs].ignore(os.path.join(target, NPM_LOCKFILE), target)
        repo_types[vcs].ignore(os.path.join(target, 'node_modules'), target)

        os.remove(lockfile_path)
    except OSError as e:
        # errno is imported at module level already; the previous local
        # "import errno" here was redundant and shadowed it.
        if e.errno == errno.ENOENT:
            logging.error('Failed to install Node.js dependencies for %s,'
                          ' please ensure Node.js is installed.', target)
        else:
            raise
249 305
250 306
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    """Clone *target* from its source if it does not exist yet.

    Returns True when a fresh clone was made, False otherwise.
    """
    if os.path.exists(target):
        return False

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return False

    postprocess = repo_types[type].postprocess_url
    root = postprocess(root)
    sourcename = postprocess(sourcename)

    # Local roots are joined as filesystem paths, remote ones as URLs.
    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    repo_types[type].clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)
    return True
272 329
273 330
def update_repo(target, type, revision):
    """Check out *revision* in *target*, pulling first if it is unknown.

    Returns True when the working copy was changed, False otherwise.
    """
    backend = repo_types[type]
    resolved_revision = backend.get_revision_id(target, revision)
    current_revision = backend.get_revision_id(target)

    if resolved_revision == current_revision:
        return False

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not checked out to %s', target, revision)
        return False

    if not resolved_revision:
        logging.info('Revision %s is unknown, downloading remote changes' % revision)
        backend.pull(target)
        resolved_revision = backend.get_revision_id(target, revision)
        if not resolved_revision:
            raise Exception('Failed to resolve revision %s' % revision)

    logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
    backend.update(target, resolved_revision, revision)
    return True
293 352
294 353
def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
    """Ensure all dependencies of *repodir* are cloned, checked out and have
    their npm packages installed, recursing into each dependency.

    When self_update is True and the dependencies file declares a "_self"
    entry, this script replaces itself with the dependency's copy and
    restarts. Note: skipdependencies uses a (read-only) mutable default;
    it is never modified here.
    """
    config = read_deps(repodir)
    if config is None:
        if level == 0:
            logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
        return
    if level >= 10:
        # Fixed NameError: this warning referenced the undefined name
        # "repo" instead of the "repodir" parameter.
        logging.warning('Too much subrepository nesting, ignoring %s' % repodir)
        return

    if overrideroots is not None:
        config['_root'] = overrideroots

    for dirname, sources in sorted(config.items()):
        if (dirname.startswith('_') or
                skipdependencies.intersection([s[0] for s in sources if s[0]])):
            continue

        target = safe_join(repodir, dirname)
        parenttype = get_repo_type(repodir)
        _root = config.get('_root', {})

        # Initialize vcs so that a dependency declaring no source for the
        # parent's VCS type falls through to the warning below instead of
        # raising a NameError in the loop condition.
        vcs = None
        for key in list(sources.keys()) + list(_root.keys()):
            if key == parenttype or key is None and vcs != '*':
                vcs = key
        source, rev = merge_seqs(sources.get('*'), sources.get(vcs))

        if not (vcs and source and rev):
            logging.warning('No valid source / revision found to create %s' % target)
            continue

        repo_cloned = ensure_repo(repodir, parenttype, target, vcs,
                                  _root.get(vcs, ''), source)
        repo_updated = update_repo(target, vcs, rev)
        # A leftover lockfile means the last npm install failed - retry it.
        recent_npm_failed = os.path.exists(os.path.join(target, NPM_LOCKFILE))
        if repo_cloned or repo_updated or recent_npm_failed:
            resolve_npm_dependencies(target, vcs)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and '_self' in config and '*' in config['_self']:
        source = safe_join(repodir, config['_self']['*'])
        try:
            with io.open(source, 'rb') as handle:
                sourcedata = handle.read()
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, 'rb') as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, 'wb') as handle:
                handle.write(sourcedata)
            if __name__ == '__main__':
                logging.info('Restarting %s' % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning('Cannot restart %s automatically, please rerun' % target)
355 418
356 419
357 def _ensure_line_exists(path, pattern): 420 def _ensure_line_exists(path, pattern):
358 with open(path, 'a+') as f: 421 with open(path, 'a+') as f:
422 f.seek(0, os.SEEK_SET)
359 file_content = [l.strip() for l in f.readlines()] 423 file_content = [l.strip() for l in f.readlines()]
360 if not pattern in file_content: 424 if not pattern in file_content:
361 file_content.append(pattern) 425 file_content.append(pattern)
362 f.seek(0, os.SEEK_SET) 426 f.seek(0, os.SEEK_SET)
363 f.truncate() 427 f.truncate()
364 for l in file_content: 428 for l in file_content:
365 print >>f, l 429 print >>f, l
366 430
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    # Default to the repository containing this script when no paths given.
    repos = args.repos or [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
OLDNEW
« dependencies ('K') | « dependencies ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld