Rietveld Code Review Tool

Unified Diff: lib/crawler.js

Issue 29338153: Issue 3780 - wait for the loading of filters and only afterwards start to fetch pages (Closed)
Patch Set: remove empty line (created March 15, 2016, 2:38 p.m.)
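For orientation, the heart of this patch is to defer crawling until filter subscriptions have finished loading. The sketch below only distills that pattern from the diff that follows; it is not the patch code itself. It assumes FilterStorage and FilterNotifier are obtained via the module's abprequire() helper, and startCrawling is a hypothetical stand-in for the crawl logic:

    // Simplified sketch of the pattern introduced by this patch: resolve a
    // promise once filter data is available, then start fetching pages.
    // FilterStorage/FilterNotifier come from abprequire() as in the diff below;
    // startCrawling is a hypothetical placeholder.
    function whenFiltersLoaded()
    {
      return new Promise(resolve =>
      {
        // Filters already loaded: continue immediately.
        if (FilterStorage.subscriptions.length > 0 && !FilterStorage._loading)
        {
          resolve();
          return;
        }
        // Otherwise wait for the "load" notification from FilterNotifier.
        FilterNotifier.addListener(action =>
        {
          if (action == "load")
            resolve();
        });
      });
    }

    whenFiltersLoaded().then(startCrawling).catch(Cu.reportError);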
 /*
  * This Source Code is subject to the terms of the Mozilla Public License
  * version 2.0 (the "License"). You can obtain a copy of the License at
  * http://mozilla.org/MPL/2.0/.
  */
 
 /**
  * @module crawler
  */
 
 Cu.import("resource://gre/modules/Services.jsm");
 Cu.import("resource://gre/modules/Task.jsm");
 Cu.import("resource://gre/modules/Promise.jsm");
 
 function abprequire(module)
 {
   let result = {};
   result.wrappedJSObject = result;
   Services.obs.notifyObservers(result, "adblockplus-require", module);
   return result.exports;
 }
 
 let {RequestNotifier} = abprequire("requestNotifier");
-
+let {FilterNotifier} = abprequire("filterNotifier");
+let {FilterStorage} = abprequire("filterStorage");
 
 /**
  * Creates a pool of tabs and allocates them to tasks on request.
  *
  * @param {tabbrowser} browser
  *    The tabbed browser where tabs should be created
  * @param {int} maxtabs
  *    The maximum number of tabs to be allocated
  * @constructor
  */
(...skipping 155 matching lines...)
 };
 
 /**
  * Starts the crawling session. The crawler opens each URL in a tab and stores
  * the results.
  *
  * @param {Window} window
  *    The browser window we're operating in
  * @param {String[]} urls
  *    URLs to be crawled
- * @param {int} number_of_tabs
+ * @param {int} timeout
+ *    Load timeout in milliseconds
+ * @param {int} maxtabs
  *    Maximum number of tabs to be opened
  * @param {String} targetURL
  *    URL that should receive the results
+ * @param {Function} onDone
+ *    The callback which is called after finishing of crawling of all URLs.
  */
 function run(window, urls, timeout, maxtabs, targetURL, onDone)
 {
+  new Promise((resolve, reject) =>
+  {
+    if (FilterStorage.subscriptions.length > 0 && !FilterStorage._loading)
+    {
+      resolve();
+      return;
+    }
+    FilterNotifier.addListener((action, item, newValue, oldValue) =>
+    {
+      if (action == "load")
+      {
+        resolve();
+      }
+    });
+  }).then(() => crawl_urls(window, urls, timeout, maxtabs, targetURL, onDone)).catch(reportException);
+}
+exports.run = run;
+
+/**
+ * Spawns a {Task} task to crawl each url from `urls` argument and calls
+ * `onDone` when all tasks are finished.
+ * @param {Window} window
+ *    The browser window we're operating in
+ * @param {String[]} urls
+ *    URLs to be crawled
+ * @param {int} timeout
+ *    Load timeout in milliseconds
+ * @param {int} maxtabs
+ *    Maximum number of tabs to be opened
+ * @param {String} targetURL
+ *    URL that should receive the results
+ * @param {Function} onDone
+ *    The callback which is called after finishing of all tasks.
+ */
+function crawl_urls(window, urls, timeout, maxtabs, targetURL, onDone)
+{
   let tabAllocator = new TabAllocator(window.getBrowser(), maxtabs);
   let loadListener = new LoadListener(window.getBrowser(), timeout);
   let running = 0;
   let windowCloser = new WindowCloser();
   let taskDone = function()
   {
     running--;
     if (running <= 0)
     {
       loadListener.stop();
(...skipping 21 matching lines...)
       request.addEventListener("load", taskDone, false);
       request.addEventListener("error", taskDone, false);
       request.send(JSON.stringify({
         url: url,
         startTime: Date.now(),
         error: String(exception)
       }));
     }.bind(null, url));
   }
 }
-exports.run = run;
 
 /**
  * Crawls a URL. This is a generator meant to be used via a Task object.
  *
  * @param {String} url
  * @param {TabAllocator} tabAllocator
  * @param {loadListener} loadListener
  * @result {Object}
  *    Crawling result
  */
(...skipping 54 matching lines...)
 
 function reportException(e)
 {
   let stack = "";
   if (e && typeof e == "object" && "stack" in e)
     stack = e.stack + "\n";
 
   Cu.reportError(e);
   dump(e + "\n" + stack + "\n");
 }
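For reference, a hedged usage sketch of the exported run() as it stands after this patch. The window object, URL list, timeout, tab count, and result endpoint below are illustrative values only, and the module lookup depends on how the host extension resolves the crawler module:

    // Hypothetical caller of the crawler module; all concrete values are examples.
    let {run} = require("crawler");  // or however the host extension loads this module

    run(
      window,                                           // browser window to open tabs in
      ["http://example.com/", "http://example.org/"],   // URLs to be crawled
      30000,                                            // load timeout in milliseconds
      5,                                                 // maximum number of tabs
      "http://localhost:8080/save",                      // URL that receives the results
      () => dump("Crawling finished\n")                  // onDone, called after all URLs are processed
    );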
