Side by Side Diff: ensure_dependencies.py

Issue 29340602: Issue 2596 - change adblockplus to adblockpluscore (Closed)
Patch Set: rebase Created April 21, 2016, 11:41 a.m.

#!/usr/bin/env python
# coding: utf-8

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys
import os
import posixpath
(...skipping 23 matching lines...)
buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
# Clone the adblockplus repository into adblockplus directory, overwriting the
# usual source URL for Git repository and specifying VCS specific revision IDs.
adblockplus = adblockplus hg:893426c6a6ab git:git@github.com:user/adblockplus.git@b2ffd52b
# Clone the adblockpluschrome repository into the adblockpluschrome directory,
# from a specific Git repository, specifying the revision ID.
adblockpluschrome = git:git@github.com:user/adblockpluschrome.git@1fad3a7
"""

SKIP_DEPENDENCY_UPDATES = os.environ.get(
    "SKIP_DEPENDENCY_UPDATES", ""
).lower() not in ("", "0", "false")

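Aside (illustration, not part of the patch): the assignment above treats any value of SKIP_DEPENDENCY_UPDATES other than "", "0" or "false" (case-insensitive) as a request to skip cloning and updating, so SKIP_DEPENDENCY_UPDATES=1 or SKIP_DEPENDENCY_UPDATES=yes enables the skip, while SKIP_DEPENDENCY_UPDATES=0 keeps the normal behaviour.
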
class Mercurial():
    def istype(self, repodir):
        return os.path.exists(os.path.join(repodir, ".hg"))

    def clone(self, source, target):
        if not source.endswith("/"):
            source += "/"
        subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])

    def get_revision_id(self, repo, rev=None):
        command = ["hg", "id", "--repository", repo, "--id"]
        if rev:
            command.extend(["--rev", rev])

        # Ignore stderr output and return code here: if revision lookup failed we
        # should simply return an empty string.
        result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
        return result.strip()

    def pull(self, repo):
        subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"])

    def update(self, repo, rev, revname):
        subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev])

    def ignore(self, target, repo):

        if not self.istype(target):

            config_path = os.path.join(repo, ".hg", "hgrc")
            ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies"))

            config = RawConfigParser()
            config.read(config_path)

            if not config.has_section("ui"):
                config.add_section("ui")

            config.set("ui", "ignore.dependencies", ignore_path)
            with open(config_path, "w") as stream:
                config.write(stream)

            module = os.path.relpath(target, repo)
            _ensure_line_exists(ignore_path, module)

    def postprocess_url(self, url):
        return url

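Aside (illustration, not part of the patch): a minimal sketch of how this Mercurial backend is driven by the rest of the script, assuming a dependency checked out into "buildtools" and the Mercurial revision from the usage example above; the source URL is a placeholder.

    hg = Mercurial()
    if not hg.istype("buildtools"):
        hg.clone("https://hg.example.com/buildtools", "buildtools")  # placeholder URL
    hg.pull("buildtools")
    resolved = hg.get_revision_id("buildtools", "016d16f7137b")
    hg.update("buildtools", resolved, "016d16f7137b")
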
class Git():
    def istype(self, repodir):
        return os.path.exists(os.path.join(repodir, ".git"))

    def clone(self, source, target):
        source = source.rstrip("/")
        if not source.endswith(".git"):
            source += ".git"
        subprocess.check_call(["git", "clone", "--quiet", source, target])

    def get_revision_id(self, repo, rev="HEAD"):
        command = ["git", "rev-parse", "--revs-only", rev + '^{commit}']
        return subprocess.check_output(command, cwd=repo).strip()

    def pull(self, repo):
        # Fetch tracked branches, new tags and the list of available remote branches
        subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
        # Next we need to ensure all remote branches are tracked
        newly_tracked = False
        remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
        for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
            remote, local = match.groups()
            with open(os.devnull, "wb") as devnull:
                if subprocess.call(["git", "branch", "--track", local, remote],
                                   cwd=repo, stdout=devnull, stderr=devnull) == 0:
                    newly_tracked = True
        # Finally fetch any newly tracked remote branches
        if newly_tracked:
            subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)

    def update(self, repo, rev, revname):
        subprocess.check_call(["git", "checkout", "--quiet", revname], cwd=repo)

    def ignore(self, target, repo):
        module = os.path.sep + os.path.relpath(target, repo)
        exclude_file = os.path.join(repo, ".git", "info", "exclude")
        _ensure_line_exists(exclude_file, module)

    def postprocess_url(self, url):
        # Handle alternative syntax of SSH URLS
        if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
            return "ssh://" + url.replace(":", "/", 1)
        return url

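Aside (illustration, not part of the patch): postprocess_url() only rewrites scp-style SSH addresses into proper ssh:// URLs; anything that already carries a scheme passes through unchanged. Using the example repository from the usage text above:

    git = Git()
    git.postprocess_url("git@github.com:user/adblockplus.git")
    # -> "ssh://git@github.com/user/adblockplus.git"
    git.postprocess_url("https://github.com/user/adblockplus.git")
    # -> returned unchanged, the URL already has a scheme
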
repo_types = OrderedDict((
    ("hg", Mercurial()),
    ("git", Git()),
))

# [vcs:]value
item_regexp = re.compile(
    "^(?:(" + "|".join(map(re.escape, repo_types.keys())) + "):)?"
    "(.+)$"
)

# [url@]rev
source_regexp = re.compile(
    "^(?:(.*)@)?"
    "(.+)$"
)

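Aside (illustration, not part of the patch): how the two patterns above decompose items from the usage example:

    re.search(item_regexp, "hg:016d16f7137b").groups()
    # -> ("hg", "016d16f7137b")
    re.search(item_regexp, "buildtools").groups()
    # -> (None, "buildtools"), no VCS prefix
    re.search(source_regexp, "git@github.com:user/adblockplus.git@b2ffd52b").groups()
    # -> ("git@github.com:user/adblockplus.git", "b2ffd52b")
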
def merge_seqs(seq1, seq2):
    """Return a list of any truthy values from the supplied sequences

    (None, 2), (1,) => [1, 2]
    None, (1, 2) => [1, 2]
    (1, 2), (3, 4) => [3, 4]
    """
    return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ())

def parse_spec(path, line):
    if "=" not in line:
        logging.warning("Invalid line in file %s: %s" % (path, line))
        return None, None

    key, value = line.split("=", 1)
    key = key.strip()
    items = value.split()
    if not len(items):
        logging.warning("No value specified for key %s in file %s" % (key, path))
        return key, None

    result = OrderedDict()
    is_dependency_field = not key.startswith("_")

    for i, item in enumerate(items):
        try:
            vcs, value = re.search(item_regexp, item).groups()
            vcs = vcs or "*"
            if is_dependency_field:
                if i == 0 and vcs == "*":
                    # In order to be backwards compatible we have to assume that the first
                    # source contains only a URL/path for the repo if it does not contain
                    # the VCS part
                    url_rev = (value, None)
                else:
                    url_rev = re.search(source_regexp, value).groups()
                result[vcs] = merge_seqs(result.get(vcs), url_rev)
            else:
                if vcs in result:
                    logging.warning("Ignoring duplicate value for type %r "
                                    "(key %r in file %r)" % (vcs, key, path))
                result[vcs] = value
        except AttributeError:
            logging.warning("Ignoring invalid item %r for type %r "
                            "(key %r in file %r)" % (item, vcs, key, path))
            continue
    return key, result


def read_deps(repodir):
    result = {}
    deps_path = os.path.join(repodir, "dependencies")
    try:
        with io.open(deps_path, "rt", encoding="utf-8") as handle:
            for line in handle:
                # Remove comments and whitespace
                line = re.sub(r"#.*", "", line).strip()
                if not line:
                    continue

                key, spec = parse_spec(deps_path, line)
                if spec:
                    result[key] = spec
        return result
    except IOError, e:
        if e.errno != errno.ENOENT:
            raise
        return None

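Aside (illustration, not part of the patch): roughly what read_deps() returns for the first dependency line of the usage example; merge_seqs() relies on Python 2 map() padding the shorter sequence with None:

    # "buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5" parses to
    # {"buildtools": OrderedDict([
    #     ("*",   ["buildtools", None]),    # default source, no revision
    #     ("hg",  [None, "016d16f7137b"]),  # Mercurial revision only
    #     ("git", [None, "f3f8692f82e5"]),  # Git revision only
    # ])}
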
def safe_join(path, subpath):
    # This has been inspired by Flask's safe_join() function
    forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
    if any(sep in subpath for sep in forbidden):
        raise Exception("Illegal directory separator in dependency path %s" % subpath)

    normpath = posixpath.normpath(subpath)
    if posixpath.isabs(normpath):
        raise Exception("Dependency path %s cannot be absolute" % subpath)
    if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
        raise Exception("Dependency path %s has to be inside the repository" % subpath)
    return os.path.join(path, *normpath.split(posixpath.sep))

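Aside (illustration, not part of the patch): safe_join() accepts plain relative POSIX paths and rejects anything that could escape the repository; the results shown assume a POSIX os.path and placeholder paths:

    safe_join("/repo", "adblockpluscore")   # -> "/repo/adblockpluscore"
    safe_join("/repo", "lib/extra")         # -> "/repo/lib/extra"
    safe_join("/repo", "../elsewhere")      # raises: has to be inside the repository
    safe_join("/repo", "/etc/passwd")       # raises: cannot be absolute
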
def get_repo_type(repo):
    for name, repotype in repo_types.iteritems():
        if repotype.istype(repo):
            return name
    return "hg"

def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
    if os.path.exists(target):
        return

    if SKIP_DEPENDENCY_UPDATES:
        logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
                        "%s not cloned", target)
        return

    postprocess_url = repo_types[type].postprocess_url
    root = postprocess_url(root)
    sourcename = postprocess_url(sourcename)

    if os.path.exists(root):
        url = os.path.join(root, sourcename)
    else:
        url = urlparse.urljoin(root, sourcename)

    logging.info("Cloning repository %s into %s" % (url, target))
    repo_types[type].clone(url, target)
    repo_types[parenttype].ignore(target, parentrepo)

def update_repo(target, type, revision):
    resolved_revision = repo_types[type].get_revision_id(target, revision)
    current_revision = repo_types[type].get_revision_id(target)

    if resolved_revision != current_revision:
        if SKIP_DEPENDENCY_UPDATES:
            logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
                            "%s not checked out to %s", target, revision)
            return

        if not resolved_revision:
            logging.info("Revision %s is unknown, downloading remote changes" % revision)
            repo_types[type].pull(target)
            resolved_revision = repo_types[type].get_revision_id(target, revision)
            if not resolved_revision:
                raise Exception("Failed to resolve revision %s" % revision)

        logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
        repo_types[type].update(target, resolved_revision, revision)

def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
    config = read_deps(repodir)
    if config is None:
        if level == 0:
            logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
        return
    if level >= 10:
        logging.warning("Too much subrepository nesting, ignoring %s" % repo)
        return

    if overrideroots is not None:
        config["_root"] = overrideroots

    for dir, sources in config.iteritems():
        if (dir.startswith("_") or
                skipdependencies.intersection([s[0] for s in sources if s[0]])):
            continue

        target = safe_join(repodir, dir)
        parenttype = get_repo_type(repodir)
        _root = config.get("_root", {})

        for key in sources.keys() + _root.keys():
            if key == parenttype or key is None and vcs != "*":
                vcs = key
        source, rev = merge_seqs(sources.get("*"), sources.get(vcs))

        if not (vcs and source and rev):
            logging.warning("No valid source / revision found to create %s" % target)
            continue

        ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
        update_repo(target, vcs, rev)
        resolve_deps(target, level + 1, self_update=False,
                     overrideroots=overrideroots, skipdependencies=skipdependencies)

    if self_update and "_self" in config and "*" in config["_self"]:
        source = safe_join(repodir, config["_self"]["*"])
        try:
            with io.open(source, "rb") as handle:
                sourcedata = handle.read()
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            logging.warning("File %s doesn't exist, skipping self-update" % source)
            return

        target = __file__
        with io.open(target, "rb") as handle:
            targetdata = handle.read()

        if sourcedata != targetdata:
            logging.info("Updating %s from %s, don't forget to commit" % (target, source))
            with io.open(target, "wb") as handle:
                handle.write(sourcedata)
            if __name__ == "__main__":
                logging.info("Restarting %s" % target)
                os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
            else:
                logging.warning("Cannot restart %s automatically, please rerun" % target)

def _ensure_line_exists(path, pattern):
    with open(path, 'a+') as f:
        file_content = [l.strip() for l in f.readlines()]
        if not pattern in file_content:
            file_content.append(pattern)
            f.seek(0, os.SEEK_SET)
            f.truncate()
            for l in file_content:
                print >>f, l

if __name__ == "__main__":
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.")
    parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path")
    parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output")
    args = parser.parse_args()

    if args.quiet:
        logging.disable(logging.INFO)

    repos = args.repos
    if not len(repos):
        repos = [os.path.dirname(__file__)]
    for repo in repos:
        resolve_deps(repo)
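
Aside (illustration, not part of the patch): typical invocations of the script, based on the argparse definitions above; the repository path is a placeholder:

    python ensure_dependencies.py                    # process the repository containing this script
    python ensure_dependencies.py path/to/repo       # process an explicit repository
    python ensure_dependencies.py -q path/to/repo    # same, without informational output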