Rietveld Code Review Tool

Side by Side Diff: ensure_dependencies.py

Issue 29329056: Issue 3194 - Allow multiple sources for a dependency (Closed)
Patch Set: Implemented Sebastian's suggested syntax (created Oct. 15, 2015, 2:53 p.m.)
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # coding: utf-8 2 # coding: utf-8
3 3
4 # This Source Code Form is subject to the terms of the Mozilla Public 4 # This Source Code Form is subject to the terms of the Mozilla Public
5 # License, v. 2.0. If a copy of the MPL was not distributed with this 5 # License, v. 2.0. If a copy of the MPL was not distributed with this
6 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 6 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
7 7
8 import sys 8 import sys
9 import os 9 import os
10 import posixpath 10 import posixpath
(...skipping 14 matching lines...)
25 # VCS-specific root URLs for the repositories 25 # VCS-specific root URLs for the repositories
26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/ 26 _root = hg:https://hg.adblockplus.org/ git:https://github.com/adblockplus/
27 # File to update this script from (optional) 27 # File to update this script from (optional)
28 _self = buildtools/ensure_dependencies.py 28 _self = buildtools/ensure_dependencies.py
29 # Check out elemhidehelper repository into extensions/elemhidehelper directory 29 # Check out elemhidehelper repository into extensions/elemhidehelper directory
30 # at tag "1.2". 30 # at tag "1.2".
31 extensions/elemhidehelper = elemhidehelper 1.2 31 extensions/elemhidehelper = elemhidehelper 1.2
32 # Check out buildtools repository into buildtools directory at VCS-specific 32 # Check out buildtools repository into buildtools directory at VCS-specific
33 # revision IDs. 33 # revision IDs.
34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5 34 buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
35 # Check out the adblockplus repository into adblockplus directory, overwriting
Sebastian Noack 2015/10/15 15:19:41 Nit: "Check out" isn't hg/git terminology.
kzar 2015/10/15 16:12:25 Done.
36 # the usual source URL for the Git repository and specifying VCS-specific revision
37 # IDs.
38 adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:kzar/adblockplus.git@b2ffd52b
39 # Check out the adblockpluschrome repository into the adblockpluschrome
40 # directory, from a specific Git repository, specifying the revision ID.
41 adblockpluschrome = git:git@github.com:kzar/adblockpluschrome.git@1fad3a7
35 """ 42 """
36 43
37 SKIP_DEPENDENCY_UPDATES = os.environ.get( 44 SKIP_DEPENDENCY_UPDATES = os.environ.get(
38 "SKIP_DEPENDENCY_UPDATES", "" 45 "SKIP_DEPENDENCY_UPDATES", ""
39 ).lower() not in ("", "0", "false") 46 ).lower() not in ("", "0", "false")
40 47
41 class Mercurial(): 48 class Mercurial():
42 def istype(self, repodir): 49 def istype(self, repodir):
43 return os.path.exists(os.path.join(repodir, ".hg")) 50 return os.path.exists(os.path.join(repodir, ".hg"))
44 51
(...skipping 83 matching lines...)
128 # Handle alternative syntax of SSH URLS 135 # Handle alternative syntax of SSH URLS
129 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme: 136 if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
130 return "ssh://" + url.replace(":", "/", 1) 137 return "ssh://" + url.replace(":", "/", 1)
131 return url 138 return url
132 139
133 repo_types = OrderedDict(( 140 repo_types = OrderedDict((
134 ("hg", Mercurial()), 141 ("hg", Mercurial()),
135 ("git", Git()), 142 ("git", Git()),
136 )) 143 ))
137 144
145 # [vcs:]value
146 item_regexp = re.compile(
147 "^(?:(" + "|".join(repo_types.keys()) +"):)?"
148 "(.+)$"
149 )
150
151 # [url@]rev
152 source_regexp = re.compile(
153 "^(?:(.*)@)?"
154 "(.+)$"
155 )
156
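
A brief sketch (not part of the patch) of what the two patterns above capture for the Git item from the example spec. The expanded pattern literals assume repo_types contains exactly "hg" and "git", and .match() is used here only to mirror the patch as shown.

import re

item_regexp = re.compile("^(?:(hg|git):)?(.+)$")  # [vcs:]value, expanded form
source_regexp = re.compile("^(?:(.*)@)?(.+)$")    # [url@]rev

vcs, value = item_regexp.match("git:git@github.com:kzar/adblockplus.git@b2ffd52b").groups()
# vcs == "git", value == "git@github.com:kzar/adblockplus.git@b2ffd52b"
url, rev = source_regexp.match(value).groups()
# The greedy (.*) splits on the last "@":
# url == "git@github.com:kzar/adblockplus.git", rev == "b2ffd52b"
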
157 def merge_tuples(tuple_1, tuple_2):
158 """Return tuple containing any truthy values from the suplied tuples
159
160 (None, 2), (1,) => (1, 2)
161 None, (1, 2) => (1, 2)
162 (1, 2), (3, 4) => (3, 4)
163 """
164 return tuple(i2 or i1 for i1, i2 in map(None, tuple_1 or (), tuple_2 or ()))
165
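
For reference (not part of the patch): in Python 2, map(None, a, b) pads the shorter sequence with None, so merge_tuples() above performs an element-wise merge in which the second tuple wins wherever it is truthy. This is how a VCS-specific entry overrides the default "*" source:

# Illustrative only; merge_tuples() as defined above.
merge_tuples(("adblockplus", None), (None, "893426c6a6ab"))
# => ("adblockplus", "893426c6a6ab")  URL from the default source, revision from hg
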
138 def parse_spec(path, line): 166 def parse_spec(path, line):
139 if "=" not in line: 167 if "=" not in line:
140 logging.warning("Invalid line in file %s: %s" % (path, line)) 168 logging.warning("Invalid line in file %s: %s" % (path, line))
141 return None, None 169 return None, None
142 170
143 key, value = line.split("=", 1) 171 key, value = line.split("=", 1)
144 key = key.strip() 172 key = key.strip()
145 items = value.split() 173 items = value.split()
146 if not len(items): 174 if not len(items):
147 logging.warning("No value specified for key %s in file %s" % (key, path)) 175 logging.warning("No value specified for key %s in file %s" % (key, path))
148 return key, None 176 return key, None
149 177
150 result = OrderedDict() 178 result = OrderedDict()
151 if not key.startswith("_"): 179 is_dependency_field = not key.startswith("_")
152 result["_source"] = items.pop(0)
153 180
154 for item in items: 181 for i, item in enumerate(items):
155 if ":" in item: 182 try:
156 type, value = item.split(":", 1) 183 vcs, value = re.match(item_regexp, item).groups()
157 else: 184 vcs = vcs or "*"
158 type, value = ("*", item) 185 if is_dependency_field:
159 if type in result: 186 if i == 0 and vcs == "*":
160 logging.warning("Ignoring duplicate value for type %s (key %s in file %s)" % (type, key, path)) 187 # In order to be backwards compatible we have to assume that the first
161 else: 188 # source contains only a URL/path for the repo if it does not contain
162 result[type] = value 189 # the VCS part
190 url_rev = (value, None)
191 else:
192 url_rev = re.match(source_regexp, value).groups()
Sebastian Noack 2015/10/15 15:19:41 We decided a while ago to not use .match() anymore
kzar 2015/10/15 16:12:25 Done.
193 result[vcs] = merge_tuples(result.get(vcs), url_rev)
194 else:
195 if vcs in result:
196 logging.warning("Ignoring duplicate value for type %s"
197 "(key %s in file %s)" % (vcs, key, path))
198 result[vcs] = value
199 except AttributeError:
200 logging.warning("Ignoring invalid item '%s' for type %s"
Sebastian Noack 2015/10/15 15:19:41 How about using %r instead '%s'?
kzar 2015/10/15 16:12:25 (Some further testing showed it's clearer if all t
201 "(key %s in file %s)" % (item, vcs, key, path))
202 continue
163 return key, result 203 return key, result
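
For illustration (not part of the patch): given the parse_spec() shown above, the multi-source "adblockplus" line from the example spec should parse roughly as follows. The exact layout is inferred from this patch set and may change in later patch sets; the import assumes the patched ensure_dependencies.py is on the import path.

from collections import OrderedDict
from ensure_dependencies import parse_spec

key, sources = parse_spec(
    "dependencies",
    "adblockplus = adblockplus hg:893426c6a6ab "
    "git:git@github.com:kzar/adblockplus.git@b2ffd52b")

assert key == "adblockplus"
assert sources == OrderedDict([
    ("*", ("adblockplus", None)),                   # bare first item: URL/path only
    ("hg", (None, "893426c6a6ab")),                 # revision only, URL falls back to "*"
    ("git", ("git@github.com:kzar/adblockplus.git", "b2ffd52b")),  # URL and revision
])
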
164 204
165 def read_deps(repodir): 205 def read_deps(repodir):
166 result = {} 206 result = {}
167 deps_path = os.path.join(repodir, "dependencies") 207 deps_path = os.path.join(repodir, "dependencies")
168 try: 208 try:
169 with io.open(deps_path, "rt", encoding="utf-8") as handle: 209 with io.open(deps_path, "rt", encoding="utf-8") as handle:
170 for line in handle: 210 for line in handle:
171 # Remove comments and whitespace 211 # Remove comments and whitespace
172 line = re.sub(r"#.*", "", line).strip() 212 line = re.sub(r"#.*", "", line).strip()
(...skipping 21 matching lines...)
194 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep): 234 if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
195 raise Exception("Dependency path %s has to be inside the repository" % subpath) 235 raise Exception("Dependency path %s has to be inside the repository" % subpath)
196 return os.path.join(path, *normpath.split(posixpath.sep)) 236 return os.path.join(path, *normpath.split(posixpath.sep))
197 237
198 def get_repo_type(repo): 238 def get_repo_type(repo):
199 for name, repotype in repo_types.iteritems(): 239 for name, repotype in repo_types.iteritems():
200 if repotype.istype(repo): 240 if repotype.istype(repo):
201 return name 241 return name
202 return None 242 return None
203 243
204 def ensure_repo(parentrepo, target, roots, sourcename): 244 def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
205 if os.path.exists(target): 245 if os.path.exists(target):
206 return 246 return
207 247
208 if SKIP_DEPENDENCY_UPDATES: 248 if SKIP_DEPENDENCY_UPDATES:
209 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " 249 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
210 "%s not cloned", target) 250 "%s not cloned", target)
211 return 251 return
212 252
213 parenttype = get_repo_type(parentrepo)
214 type = None
215 for key in roots:
216 if key == parenttype or (key in repo_types and type is None):
217 type = key
218 if type is None:
219 raise Exception("No valid source found to create %s" % target)
220
221 postprocess_url = repo_types[type].postprocess_url 253 postprocess_url = repo_types[type].postprocess_url
222 root = postprocess_url(roots[type]) 254 root = postprocess_url(root)
223 sourcename = postprocess_url(sourcename) 255 sourcename = postprocess_url(sourcename)
224 256
225 if os.path.exists(root): 257 if os.path.exists(root):
226 url = os.path.join(root, sourcename) 258 url = os.path.join(root, sourcename)
227 else: 259 else:
228 url = urlparse.urljoin(root, sourcename) 260 url = urlparse.urljoin(root, sourcename)
229 261
230 logging.info("Cloning repository %s into %s" % (url, target)) 262 logging.info("Cloning repository %s into %s" % (url, target))
231 repo_types[type].clone(url, target) 263 repo_types[type].clone(url, target)
264 repo_types[parenttype].ignore(target, parentrepo)
232 265
233 for repo in repo_types.itervalues(): 266 def update_repo(target, type, revision):
234 if repo.istype(parentrepo):
235 repo.ignore(target, parentrepo)
236
237 def update_repo(target, revisions):
238 type = get_repo_type(target)
239 if type is None:
240 logging.warning("Type of repository %s unknown, skipping update" % target)
241 return
242
243 if type in revisions:
244 revision = revisions[type]
245 elif "*" in revisions:
246 revision = revisions["*"]
247 else:
248 logging.warning("No revision specified for repository %s (type %s), skipping update" % (target, type))
249 return
250
251 resolved_revision = repo_types[type].get_revision_id(target, revision) 267 resolved_revision = repo_types[type].get_revision_id(target, revision)
252 current_revision = repo_types[type].get_revision_id(target) 268 current_revision = repo_types[type].get_revision_id(target)
253 269
254 if resolved_revision != current_revision: 270 if resolved_revision != current_revision:
255 if SKIP_DEPENDENCY_UPDATES: 271 if SKIP_DEPENDENCY_UPDATES:
256 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, " 272 logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
257 "%s not checked out to %s", target, revision) 273 "%s not checked out to %s", target, revision)
258 return 274 return
259 275
260 if not resolved_revision: 276 if not resolved_revision:
(...skipping 12 matching lines...)
273 if level == 0: 289 if level == 0:
274 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE)) 290 logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
275 return 291 return
276 if level >= 10: 292 if level >= 10:
277 logging.warning("Too much subrepository nesting, ignoring %s" % repo) 293 logging.warning("Too much subrepository nesting, ignoring %s" % repo)
278 return 294 return
279 295
280 if overrideroots is not None: 296 if overrideroots is not None:
281 config["_root"] = overrideroots 297 config["_root"] = overrideroots
282 298
283 for dir, revisions in config.iteritems(): 299 for dir, sources in config.iteritems():
284 if dir.startswith("_") or revisions["_source"] in skipdependencies: 300 if (dir.startswith("_") or
301 skipdependencies.intersection([s[0] for s in sources if s[0]])):
285 continue 302 continue
303
286 target = safe_join(repodir, dir) 304 target = safe_join(repodir, dir)
287 ensure_repo(repodir, target, config.get("_root", {}), revisions["_source"]) 305 parenttype = get_repo_type(repodir)
288 update_repo(target, revisions) 306 _root = config.get("_root", {})
289 resolve_deps(target, level + 1, self_update=False, overrideroots=overrideroots, skipdependencies=skipdependencies) 307
308 for key in sources.keys() + _root.keys():
309 if key == parenttype or key is None and vcs != "*":
310 vcs = key
311 source, rev = merge_tuples(sources.get("*"), sources.get(vcs))
312
313 if not (vcs and source and rev):
314 logging.warning("No valid source / revision found to create %s" % target)
315 continue
316
317 ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
318 update_repo(target, vcs, rev)
319 resolve_deps(target, level + 1, self_update=False,
320 overrideroots=overrideroots, skipdependencies=skipdependencies)
290 321
291 if self_update and "_self" in config and "*" in config["_self"]: 322 if self_update and "_self" in config and "*" in config["_self"]:
292 source = safe_join(repodir, config["_self"]["*"]) 323 source = safe_join(repodir, config["_self"]["*"])
293 try: 324 try:
294 with io.open(source, "rb") as handle: 325 with io.open(source, "rb") as handle:
295 sourcedata = handle.read() 326 sourcedata = handle.read()
296 except IOError, e: 327 except IOError, e:
297 if e.errno != errno.ENOENT: 328 if e.errno != errno.ENOENT:
298 raise 329 raise
299 logging.warning("File %s doesn't exist, skipping self-update" % source) 330 logging.warning("File %s doesn't exist, skipping self-update" % source)
(...skipping 32 matching lines...)
332 args = parser.parse_args() 363 args = parser.parse_args()
333 364
334 if args.quiet: 365 if args.quiet:
335 logging.disable(logging.INFO) 366 logging.disable(logging.INFO)
336 367
337 repos = args.repos 368 repos = args.repos
338 if not len(repos): 369 if not len(repos):
339 repos = [os.path.dirname(__file__)] 370 repos = [os.path.dirname(__file__)]
340 for repo in repos: 371 for repo in repos:
341 resolve_deps(repo) 372 resolve_deps(repo)