Rietveld Code Review Tool
Help | Bug tracker | Discussion group | Source code

Delta Between Two Patch Sets: ensure_dependencies.py

Issue 29565802: Issue 5836 - Update buildtools dependency in adblockplusui (Closed)
Left Patch Set: Created Oct. 5, 2017, 7:43 p.m.
Right Patch Set: Fixed default_locale Created Oct. 10, 2017, 7:13 p.m.
Left:
Right:
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
Right: Side by side diff | Download
« no previous file with change/comment | « dependencies ('k') | locale/bn/firstRun.json » ('j') | no next file with change/comment »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
LEFTRIGHT
(no file at all)
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8
3 2
4 # This Source Code Form is subject to the terms of the Mozilla Public 3 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 4 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 5 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 6
8 import sys 7 import sys
9 import os 8 import os
10 import posixpath 9 import posixpath
11 import re 10 import re
12 import io 11 import io
13 import errno 12 import errno
14 import logging 13 import logging
15 import subprocess 14 import subprocess
16 import urlparse 15 import urlparse
17 import argparse 16 import argparse
17 import json
18 18
19 from collections import OrderedDict 19 from collections import OrderedDict
20 from ConfigParser import RawConfigParser 20 from ConfigParser import RawConfigParser
21 21
# Help text shown when no dependencies file is found; documents the
# expected file format by example.
USAGE = '''
A dependencies file should look like this:

  # VCS-specific root URLs for the repositories
  _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
  # File to update this script from (optional)
  _self = buildtools/ensure_dependencies.py
  # Clone elemhidehelper repository into extensions/elemhidehelper directory at
  # tag "1.2".
  extensions/elemhidehelper = elemhidehelper 1.2
  # Clone buildtools repository into buildtools directory at VCS-specific
  # revision IDs.
  buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
  # Clone the adblockplus repository into adblockplus directory, overwriting the
  # usual source URL for Git repository and specifying VCS specific revision IDs.
  adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
  # Clone the adblockpluschrome repository into the adblockpluschrome directory,
  # from a specific Git repository, specifying the revision ID.
  adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
'''

# True when the SKIP_DEPENDENCY_UPDATES environment variable is set to
# anything other than an empty string, "0" or "false" (case-insensitive).
_skip_flag = os.environ.get('SKIP_DEPENDENCY_UPDATES', '').lower()
SKIP_DEPENDENCY_UPDATES = _skip_flag not in ('', '0', 'false')
46 46
47 47
class Mercurial():
    """VCS backend driving the ``hg`` command-line client."""

    def istype(self, repodir):
        """Return True when *repodir* is a Mercurial working copy."""
        return os.path.exists(os.path.join(repodir, '.hg'))

    def clone(self, source, target):
        """Clone *source* into *target* without updating the working copy."""
        if not source.endswith('/'):
            source += '/'
        subprocess.check_call(['hg', 'clone', '--quiet', '--noupdate', source, target])

    def get_revision_id(self, repo, rev=None):
        """Resolve *rev* (or the working copy parent) to a changeset ID.

        Returns an empty string when the lookup fails.
        """
        command = ['hg', 'id', '--repository', repo, '--id']
        if rev:
            command += ['--rev', rev]

        # Ignore stderr output and return code here: if revision lookup
        # failed we should simply return an empty string.
        process = subprocess.Popen(command, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        return process.communicate()[0].strip()

    def pull(self, repo):
        """Download new remote changesets into *repo*."""
        subprocess.check_call(['hg', 'pull', '--repository', repo, '--quiet'])

    def update(self, repo, rev, revname):
        """Update the working copy of *repo* to revision *rev*."""
        subprocess.check_call(['hg', 'update', '--repository', repo, '--quiet', '--check', '--rev', rev])

    def ignore(self, target, repo):
        """Make Mercurial ignore *target* via an auxiliary ignore file.

        Registers .hg/dependencies as an extra ignore file in the hgrc and
        adds the dependency's relative path to it.
        """
        config_path = os.path.join(repo, '.hg', 'hgrc')
        ignore_file = os.path.join('.hg', 'dependencies')
        ignore_path = os.path.join(repo, ignore_file)

        config = RawConfigParser()
        config.read(config_path)

        if not config.has_section('ui'):
            config.add_section('ui')

        config.set('ui', 'ignore.dependencies', ignore_file)
        with open(config_path, 'w') as stream:
            config.write(stream)

        _ensure_line_exists(ignore_path, os.path.relpath(target, repo))

    def postprocess_url(self, url):
        # Mercurial URLs need no rewriting.
        return url
95 93
96 94
class Git():
    """VCS backend driving the ``git`` command-line client."""

    def istype(self, repodir):
        """Return True when *repodir* is a Git working copy."""
        return os.path.exists(os.path.join(repodir, '.git'))

    def clone(self, source, target):
        """Clone *source* into *target*, normalizing the URL to end in .git."""
        source = source.rstrip('/')
        if not source.endswith('.git'):
            source += '.git'
        subprocess.check_call(['git', 'clone', '--quiet', source, target])

    def get_revision_id(self, repo, rev='HEAD'):
        """Resolve *rev* to a full commit hash (empty string on failure)."""
        command = ['git', 'rev-parse', '--revs-only', rev + '^{commit}']
        return subprocess.check_output(command, cwd=repo).strip()

    def pull(self, repo):
        """Fetch remote changes and make sure every remote branch is tracked."""
        # Fetch tracked branches, new tags and the list of available remote branches
        subprocess.check_call(['git', 'fetch', '--quiet', '--all', '--tags'], cwd=repo)
        # Next we need to ensure all remote branches are tracked
        newly_tracked = False
        remotes = subprocess.check_output(['git', 'branch', '--remotes'], cwd=repo)
        for match in re.finditer(r'^\s*(origin/(\S+))$', remotes, re.M):
            remote, local = match.groups()
            with open(os.devnull, 'wb') as devnull:
                status = subprocess.call(['git', 'branch', '--track', local, remote],
                                         cwd=repo, stdout=devnull, stderr=devnull)
                if status == 0:
                    newly_tracked = True
        # Finally fetch any newly tracked remote branches
        if newly_tracked:
            subprocess.check_call(['git', 'fetch', '--quiet', 'origin'], cwd=repo)

    def update(self, repo, rev, revname):
        """Check out *revname* in *repo*."""
        subprocess.check_call(['git', 'checkout', '--quiet', revname], cwd=repo)

    def ignore(self, target, repo):
        """Add *target* to Git's local (non-committed) exclude file."""
        module = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, '.git', 'info', 'exclude')
        _ensure_line_exists(exclude_file, module)

    def postprocess_url(self, url):
        # Handle alternative syntax of SSH URLS (user@host:path).
        if '@' in url and ':' in url and not urlparse.urlsplit(url).scheme:
            return 'ssh://' + url.replace(':', '/', 1)
        return url
140 138
139
# Registry of supported VCS backends; ordered so that Mercurial is
# detected (and defaulted to) before Git.
repo_types = OrderedDict((
    ('hg', Mercurial()),
    ('git', Git()),
))

# Matches an item of the form [vcs:]value, capturing the optional VCS
# prefix and the value.
item_regexp = re.compile(
    '^(?:(%s):)?(.+)$' % '|'.join(map(re.escape, repo_types.keys()))
)

# Matches a source of the form [url@]rev, capturing the optional URL and
# the revision.
source_regexp = re.compile(
    '^(?:(.*)@)?'
    '(.+)$'
)
157 156
158 157
def merge_seqs(seq1, seq2):
    """Return a list of any truthy values from the supplied sequences,
    preferring elements of *seq2* over the corresponding ones of *seq1*.

    (None, 2), (1,) => [1, 2]
    None, (1, 2) => [1, 2]
    (1, 2), (3, 4) => [3, 4]
    """
    return map(lambda old, new: new or old, seq1 or (), seq2 or ())
167 166
168 167
def parse_spec(path, line):
    """Parse one ``key = value ...`` line from a dependencies file.

    Returns a (key, spec) pair. For dependency keys, spec maps a VCS name
    (or '*' for "any VCS") to a merged (url, revision) pair; for meta keys
    (starting with "_") it maps the VCS name to the raw value. Returns
    (None, None) for lines without '=' and (key, None) for empty values.
    """
    if '=' not in line:
        logging.warning('Invalid line in file %s: %s' % (path, line))
        return None, None

    key, value = line.split('=', 1)
    key = key.strip()
    tokens = value.split()
    if not tokens:
        logging.warning('No value specified for key %s in file %s' % (key, path))
        return key, None

    result = OrderedDict()
    # Keys starting with "_" (e.g. _root, _self) are meta entries, not
    # dependencies, and keep their values verbatim.
    is_dependency_field = not key.startswith('_')

    for index, token in enumerate(tokens):
        try:
            vcs, value = re.search(item_regexp, token).groups()
            vcs = vcs or '*'
            if is_dependency_field:
                if index == 0 and vcs == '*':
                    # In order to be backwards compatible we have to assume
                    # that the first source contains only a URL/path for the
                    # repo if it does not contain the VCS part.
                    url_rev = (value, None)
                else:
                    url_rev = re.search(source_regexp, value).groups()
                result[vcs] = merge_seqs(result.get(vcs), url_rev)
            else:
                if vcs in result:
                    logging.warning('Ignoring duplicate value for type %r '
                                    '(key %r in file %r)' % (vcs, key, path))
                result[vcs] = value
        except AttributeError:
            logging.warning('Ignoring invalid item %r for type %r '
                            '(key %r in file %r)' % (token, vcs, key, path))
            continue
    return key, result
207 206
208 207
def read_deps(repodir):
    """Read and parse the "dependencies" file in *repodir*.

    Returns a dict mapping keys to parsed specs, or None when no
    dependencies file exists.
    """
    result = {}
    deps_path = os.path.join(repodir, 'dependencies')
    try:
        with io.open(deps_path, 'rt', encoding='utf-8') as handle:
            for raw_line in handle:
                # Strip comments and surrounding whitespace.
                line = re.sub(r'#.*', '', raw_line).strip()
                if not line:
                    continue

                key, spec = parse_spec(deps_path, line)
                if spec:
                    result[key] = spec
        return result
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return None
228 227
229 228
def safe_join(path, subpath):
    """Join POSIX-style relative *subpath* onto *path*, refusing escapes.

    Raises an Exception when *subpath* uses native separators, is absolute,
    or would resolve outside the repository. Inspired by Flask's
    safe_join() function.
    """
    forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
    if any(sep in subpath for sep in forbidden):
        raise Exception('Illegal directory separator in dependency path %s' % subpath)

    clean = posixpath.normpath(subpath)
    if posixpath.isabs(clean):
        raise Exception('Dependency path %s cannot be absolute' % subpath)
    escapes = clean == posixpath.pardir or clean.startswith(posixpath.pardir + posixpath.sep)
    if escapes:
        raise Exception('Dependency path %s has to be inside the repository' % subpath)
    return os.path.join(path, *clean.split(posixpath.sep))
242 241
243 242
def get_repo_type(repo):
    """Return the name of the VCS backend managing *repo*.

    Defaults to 'hg' when no backend recognizes the directory.
    """
    for name, backend in repo_types.iteritems():
        if backend.istype(repo):
            return name
    return 'hg'
248
249
def resolve_npm_dependencies(target, vcs):
    """Install Node.js production-only dependencies if necessary and desired.

    When the target dependency has additional Node.js dependencies declared
    run "npm install --only=production --loglevel=warn" to resolve the
    declared dependencies.

    Additionally, make sure that any VCS will ignore the installed files.

    Requires Node.js to be installed locally.
    """
    try:
        with open(os.path.join(target, 'package.json'), 'r') as fp:
            package_data = json.load(fp)

        # In case a package.json does not exist at all or if there are no
        # production dependencies declared, we don't need to run npm and can
        # bail out early.
        if not package_data.get('dependencies', False):
            return
    except IOError:
        return

    try:
        cmd = ['npm', 'install', '--only=production', '--loglevel=warn']
        subprocess.check_output(cmd, cwd=target)

        repo_types[vcs].ignore(os.path.join(target, 'node_modules'), target)
    except OSError as e:
        # FIX: dropped the redundant local "import errno" which shadowed the
        # module-level import of the same name.
        if e.errno == errno.ENOENT:
            logging.error('Failed to install Node.js dependencies for %s,'
                          ' please ensure Node.js is installed.', target)
        else:
            raise
249 285
250 286
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    """Clone the dependency *sourcename* into *target* if it is missing.

    Returns True when a fresh clone was made, False when the target already
    exists or cloning was skipped via SKIP_DEPENDENCY_UPDATES.
    """
    if os.path.exists(target):
        return False

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not cloned', target)
        return False

    normalize = repo_types[type].postprocess_url
    root = normalize(root)
    sourcename = normalize(sourcename)

    # Local roots are joined as filesystem paths, remote ones as URLs.
    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info('Cloning repository %s into %s' % (url, target))
    repo_types[type].clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)
    return True
272 309
273 310
def update_repo(target, type, revision):
    """Check out *revision* in *target* unless it is already current.

    Pulls remote changes when the revision is unknown locally. Returns True
    when the working copy was changed, False otherwise. Raises an Exception
    when the revision cannot be resolved even after pulling.
    """
    resolved_revision = repo_types[type].get_revision_id(target, revision)
    current_revision = repo_types[type].get_revision_id(target)

    # Already at the requested revision — nothing to do.
    if resolved_revision == current_revision:
        return False

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning('SKIP_DEPENDENCY_UPDATES environment variable set, '
                        '%s not checked out to %s', target, revision)
        return False

    if not resolved_revision:
        logging.info('Revision %s is unknown, downloading remote changes' % revision)
        repo_types[type].pull(target)
        resolved_revision = repo_types[type].get_revision_id(target, revision)
        if not resolved_revision:
            raise Exception('Failed to resolve revision %s' % revision)

    logging.info('Updating repository %s to revision %s' % (target, resolved_revision))
    repo_types[type].update(target, resolved_revision, revision)
    return True
293 332
294 333
def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
    """Recursively ensure all dependencies of *repodir* are present.

    Reads the "dependencies" file, clones/updates each dependency, installs
    its npm packages when it was cloned or updated, and recurses into it.
    When *self_update* is set and the file declares a "_self" entry, this
    script replaces itself with the declared copy and restarts.
    """
    config = read_deps(repodir)
    if config is None:
        if level == 0:
            logging.warning('No dependencies file in directory %s, nothing to do...\n%s' % (repodir, USAGE))
        return
    if level >= 10:
        # FIX: this message previously referenced the undefined name "repo"
        # (a NameError when used as a library); use the repodir parameter.
        logging.warning('Too much subrepository nesting, ignoring %s' % repodir)
        return

    if overrideroots is not None:
        config['_root'] = overrideroots

    for dir, sources in sorted(config.iteritems()):
        if (dir.startswith('_') or
                skipdependencies.intersection([s[0] for s in sources if s[0]])):
            continue

        target = safe_join(repodir, dir)
        parenttype = get_repo_type(repodir)
        _root = config.get('_root', {})

        # FIX: initialize vcs so a dependency with no matching key falls
        # through to the "No valid source" warning instead of raising
        # NameError (or reusing a leftover value from a previous iteration).
        vcs = None
        for key in sources.keys() + _root.keys():
            if key == parenttype or key is None and vcs != '*':
                vcs = key
        source, rev = merge_seqs(sources.get('*'), sources.get(vcs))

        if not (vcs and source and rev):
            logging.warning('No valid source / revision found to create %s' % target)
            continue

        repo_cloned = ensure_repo(repodir, parenttype, target, vcs,
                                  _root.get(vcs, ''), source)
        repo_updated = update_repo(target, vcs, rev)
        if repo_cloned or repo_updated:
            resolve_npm_dependencies(target, vcs)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and '_self' in config and '*' in config['_self']:
        source = safe_join(repodir, config['_self']['*'])
        try:
            with io.open(source, 'rb') as handle:
                sourcedata = handle.read()
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, 'rb') as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, 'wb') as handle:
                handle.write(sourcedata)
            if __name__ == '__main__':
                logging.info('Restarting %s' % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning('Cannot restart %s automatically, please rerun' % target)
355 397
356 398
357 def _ensure_line_exists(path, pattern): 399 def _ensure_line_exists(path, pattern):
358 with open(path, 'a+') as f: 400 with open(path, 'a+') as f:
401 f.seek(0, os.SEEK_SET)
359 file_content = [l.strip() for l in f.readlines()] 402 file_content = [l.strip() for l in f.readlines()]
360 if not pattern in file_content: 403 if not pattern in file_content:
361 file_content.append(pattern) 404 file_content.append(pattern)
362 f.seek(0, os.SEEK_SET) 405 f.seek(0, os.SEEK_SET)
363 f.truncate() 406 f.truncate()
364 for l in file_content: 407 for l in file_content:
365 print >>f, l 408 print >>f, l
366 409
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description='Verify dependencies for a set of repositories, by default the repository of this script.')
    parser.add_argument('repos', metavar='repository', type=str, nargs='*', help='Repository path')
    parser.add_argument('-q', '--quiet', action='store_true', help='Suppress informational output')
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    # Default to checking the repository this script lives in.
    repos = args.repos or [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
LEFTRIGHT

Powered by Google App Engine
This is Rietveld