| OLD | NEW | 
|---|
| 1 /* | 1 /* | 
| 2  * This file is part of Adblock Plus <https://adblockplus.org/>, | 2  * This file is part of Adblock Plus <https://adblockplus.org/>, | 
| 3  * Copyright (C) 2006-2017 eyeo GmbH | 3  * Copyright (C) 2006-2017 eyeo GmbH | 
| 4  * | 4  * | 
| 5  * Adblock Plus is free software: you can redistribute it and/or modify | 5  * Adblock Plus is free software: you can redistribute it and/or modify | 
| 6  * it under the terms of the GNU General Public License version 3 as | 6  * it under the terms of the GNU General Public License version 3 as | 
| 7  * published by the Free Software Foundation. | 7  * published by the Free Software Foundation. | 
| 8  * | 8  * | 
| 9  * Adblock Plus is distributed in the hope that it will be useful, | 9  * Adblock Plus is distributed in the hope that it will be useful, | 
| 10  * but WITHOUT ANY WARRANTY; without even the implied warranty of | 10  * but WITHOUT ANY WARRANTY; without even the implied warranty of | 
| (...skipping 20 matching lines...) Expand all  Loading... | 
| 31                                    | typeMap.FONT | 31                                    | typeMap.FONT | 
| 32                                    | typeMap.MEDIA | 32                                    | typeMap.MEDIA | 
| 33                                    | typeMap.POPUP | 33                                    | typeMap.POPUP | 
| 34                                    | typeMap.OBJECT | 34                                    | typeMap.OBJECT | 
| 35                                    | typeMap.OBJECT_SUBREQUEST | 35                                    | typeMap.OBJECT_SUBREQUEST | 
| 36                                    | typeMap.XMLHTTPREQUEST | 36                                    | typeMap.XMLHTTPREQUEST | 
| 37                                    | typeMap.PING | 37                                    | typeMap.PING | 
| 38                                    | typeMap.SUBDOCUMENT | 38                                    | typeMap.SUBDOCUMENT | 
| 39                                    | typeMap.OTHER); | 39                                    | typeMap.OTHER); | 
| 40 | 40 | 
/**
 * Schedule a function to run asynchronously, on the next turn of the event
 * loop.
 *
 * @param {function} func The function to call
 *
 * @returns {Promise} A promise resolved with the function's return value
 */
function callLater(func)
{
  return new Promise(resolve =>
  {
    let invoke = () =>
    {
      resolve(func());
    };

    // Prefer process.nextTick when running under Node.js; fall back to
    // setTimeout everywhere else.
    if (typeof process == "undefined")
      setTimeout(invoke, 0);
    else
      process.nextTick(invoke);
  });
}
|  | 55 | 
/**
 * Run a series of functions sequentially, chaining their results as promises.
 *
 * Accepts either an array of functions or the functions as individual
 * arguments. If 100ms or more elapse between pauses, the next function is
 * deferred to the event loop via callLater so the browser doesn't freeze up.
 *
 * @param {Array.<function>|function} funcs The functions to run
 *
 * @returns {Promise} A promise resolved once all functions have run
 */
function async(funcs)
{
  if (!Array.isArray(funcs))
    funcs = Array.from(arguments);

  let lastPause = Date.now();
  let chain = Promise.resolve();

  for (let func of funcs)
  {
    chain = chain.then(() =>
    {
      // Take a pause if it has been 100ms or longer since the last one; this
      // keeps the browser from freezing up during long computations.
      let now = Date.now();
      if (now - lastPause < 100)
        return func();

      lastPause = now;
      return callLater(func);
    });
  }

  return chain;
}
|  | 78 | 
| 41 function parseDomains(domains, included, excluded) | 79 function parseDomains(domains, included, excluded) | 
| 42 { | 80 { | 
| 43   for (let domain in domains) | 81   for (let domain in domains) | 
| 44   { | 82   { | 
| 45     if (domain != "") | 83     if (domain != "") | 
| 46     { | 84     { | 
| 47       let enabled = domains[domain]; | 85       let enabled = domains[domain]; | 
| 48       domain = punycode.toASCII(domain.toLowerCase()); | 86       domain = punycode.toASCII(domain.toLowerCase()); | 
| 49 | 87 | 
| 50       if (!enabled) | 88       if (!enabled) | 
| (...skipping 308 matching lines...) Expand 10 before | Expand all | Expand 10 after  Loading... | 
| 359   { | 397   { | 
| 360     newSelector.push(selector.substring(i, pos.start)); | 398     newSelector.push(selector.substring(i, pos.start)); | 
| 361     newSelector.push('[id=', selector.substring(pos.start + 1, pos.end), ']'); | 399     newSelector.push('[id=', selector.substring(pos.start + 1, pos.end), ']'); | 
| 362     i = pos.end; | 400     i = pos.end; | 
| 363   } | 401   } | 
| 364   newSelector.push(selector.substring(i)); | 402   newSelector.push(selector.substring(i)); | 
| 365 | 403 | 
| 366   return newSelector.join(""); | 404   return newSelector.join(""); | 
| 367 } | 405 } | 
| 368 | 406 | 
|  | 407 /** | 
|  | 408  * Check if two strings are a close match | 
|  | 409  * | 
|  | 410  * This function returns an edit operation, one of "substitute", "delete", and | 
|  | 411  * "insert", along with an index in the source string where the edit must occur | 
|  | 412  * in order to arrive at the target string. If the strings are not a close | 
|  | 413  * match, it returns null. | 
|  | 414  * | 
|  | 415  * Two strings are considered to be a close match if they are one edit | 
|  | 416  * operation apart. | 
|  | 417  * | 
|  | 418  * Deletions or insertions of a contiguous range of characters from one string | 
|  | 419  * into the other, at the same index, are treated as a single edit. For | 
|  | 420  * example, "internal" and "international" are considered to be one edit apart | 
|  | 421  * and therefore a close match. | 
|  | 422  * | 
|  | 423  * A few things to note: | 
|  | 424  * | 
|  | 425  *   1) This function does not care about the format of the input strings. For | 
|  | 426  *   example, the caller may pass in regular expressions, where "[ab]" and | 
|  | 427  *   "[bc]" could be considered to be a close match, since the order within the | 
|  | 428  *   brackets doesn't matter. This function will still return null for this set | 
|  | 429  *   of inputs since they are two edits apart. | 
|  | 430  * | 
|  | 431  *   2) To be friendly to calling code that might be passing in regular | 
|  | 432  *   expressions, this function will simply return null if it encounters a | 
|  | 433  *   special character (e.g. "\", "?", "+", etc.) in the delta. For example, | 
|  | 434  *   given "Hello" and "Hello, how are you?", it will return null. | 
|  | 435  * | 
|  | 436  *   3) If the caller does indeed pass in regular expressions, it must make the | 
|  | 437  *   important assumption that the parts where two such regular expressions may | 
|  | 438  *   differ can always be treated as normal strings. For example, | 
|  | 439  *   "^https?://example.com/ads" and "^https?://example.com/adv" differ only in | 
|  | 440  *   the last character, therefore the regular expressions can safely be merged | 
|  | 441  *   into "^https?://example.com/ad[sv]". | 
|  | 442  * | 
|  | 443  * @param {string} s The source string | 
|  | 444  * @param {string} t The target string | 
|  | 445  * | 
|  | 446  * @returns {object} An object describing the single edit operation that must | 
|  | 447  *                   occur in the source string in order to arrive at the | 
|  | 448  *                   target string | 
|  | 449  */ | 
function closeMatch(s, t)
{
  let diff = s.length - t.length;

  // Make s the longer string for the rest of the calculation; diff keeps its
  // original sign so we still know which direction the edit goes.
  if (diff < 0)
    [s, t] = [t, s];

  // Length of the common prefix.
  let i = 0;
  while (i < s.length && s[i] == t[i])
    i++;

  // Length of the common suffix, never overlapping the prefix.
  let j = 0;
  while (j < t.length && t.length - j != i &&
         s[s.length - j - 1] == t[t.length - j - 1])
    j++;

  if (diff == 0)
  {
    // For strings of equal length the delta must be exactly one character,
    // otherwise it's not a close match.
    if (t.length - j - i != 1)
      return null;
  }
  else if (i != t.length - j)
  {
    // For strings of unequal length, every character of the shorter string
    // must be accounted for by the common prefix and suffix.
    return null;
  }

  // If the delta contains any special (regular expression) characters, it's
  // not a close match.
  for (let k = i; k < s.length - j; k++)
  {
    if (".+$?{}()[]\\".includes(s[k]))
      return null;
  }

  if (diff == 0)
    return {type: "substitute", index: i};

  let edit = diff > 0 ? {type: "delete", index: i} :
                        {type: "insert", index: i};

  // A difference of more than one character is a multiple character edit and
  // carries an end index.
  if (Math.abs(diff) > 1)
    edit.endIndex = s.length - j;

  return edit;
}
|  | 528 | 
/**
 * Throw out obviously redundant rules based on their URL filters.
 *
 * If one rule's "url-filter" starts with another rule's entire "url-filter",
 * the rule with the longer filter is redundant and is flagged; the surviving
 * rules are returned.
 *
 * @param {Array.<object>} rulesInfo Rule info objects to examine
 * @param {boolean} exhaustive Whether to compare every pair of rules rather
 *                             than only those within a heuristic range
 *
 * @returns {Promise.<Array.<object>>} A promise for the non-redundant rules
 */
function eliminateRedundantRulesByURLFilter(rulesInfo, exhaustive)
{
  const heuristicRange = 1000;

  return async(rulesInfo.map((ruleInfo, index) => () =>
  {
    // A rule already known to be redundant needs no further comparisons.
    if (rulesInfo[index].redundant)
      return;

    let upperBound = exhaustive ? rulesInfo.length :
                     Math.min(index + heuristicRange, rulesInfo.length);

    for (let other = index + 1; other < upperBound; other++)
    {
      if (rulesInfo[other].redundant)
        continue;

      let filter = rulesInfo[index].rule.trigger["url-filter"];
      let otherFilter = rulesInfo[other].rule.trigger["url-filter"];

      if (filter.length >= otherFilter.length)
      {
        // If one URL filter is a prefix of the other, the longer one is
        // clearly redundant.
        if (filter.startsWith(otherFilter))
        {
          rulesInfo[index].redundant = true;
          break;
        }
      }
      else if (otherFilter.startsWith(filter))
      {
        rulesInfo[other].redundant = true;
      }
    }
  }))
  .then(() => rulesInfo.filter(ruleInfo => !ruleInfo.redundant));
}
|  | 570 | 
/**
 * Find rules that closely match a given rule based on their URL filters.
 *
 * For each close match found (see closeMatch), match bookkeeping is recorded
 * on a rule info object: single-character edits are grouped by edit index in
 * the "matches" array, with the largest group tracked in "bestMatches", while
 * a multiple character edit is kept in "multiEditMatch". This information is
 * consumed later by mergeCandidateRulesByURLFilter.
 *
 * @param {Array.<object>} rulesInfo Rule info objects for all rules
 * @param {number} index Index of the rule for which to find matches
 * @param {boolean} exhaustive Whether to compare against every other rule
 *                             rather than only those within a heuristic range
 */
function findMatchesForRuleByURLFilter(rulesInfo, index, exhaustive)
{
  // Closely matching rules are likely to be within a certain range. We only
  // look for matches within this range by default. If we increase this value,
  // it can give us more matches and a smaller resulting rule set, but possibly
  // at a significant performance cost.
  //
  // If the exhaustive option is true, we simply ignore this value and look for
  // matches throughout the rule set.
  const heuristicRange = 1000;

  let limit = exhaustive ? rulesInfo.length :
              Math.min(index + heuristicRange, rulesInfo.length);

  for (let i = index, j = i + 1; j < limit; j++)
  {
    let source = rulesInfo[i].rule.trigger["url-filter"];
    let target = rulesInfo[j].rule.trigger["url-filter"];

    let edit = closeMatch(source, target);

    if (edit)
    {
      let urlFilter, ruleInfo, match = {edit};

      if (edit.type == "insert")
      {
        // Convert the insertion into a deletion and stick it on the target
        // rule instead. We can only group deletions and substitutions;
        // therefore insertions must be treated as deletions on the target
        // rule.
        urlFilter = target;
        ruleInfo = rulesInfo[j];
        match.index = i;
        edit.type = "delete";
      }
      else
      {
        urlFilter = source;
        ruleInfo = rulesInfo[i];
        match.index = j;
      }

      // If the edit has an end index, it represents a multiple character
      // edit.
      let multiEdit = !!edit.endIndex;

      if (multiEdit)
      {
        // We only care about a single multiple character edit because the
        // number of characters for such a match doesn't matter, we can
        // only merge with one other rule.
        if (!ruleInfo.multiEditMatch)
          ruleInfo.multiEditMatch = match;
      }
      else
      {
        // For single character edits, multiple rules can be merged into
        // one. e.g. "ad", "ads", and "adv" can be merged into "ad[sv]?".
        if (!ruleInfo.matches)
          ruleInfo.matches = new Array(urlFilter.length);

        // Matches at a particular index. For example, for a source string
        // "ads", both target strings "ad" (deletion) and "adv"
        // (substitution) match at index 2, hence they are grouped together
        // to possibly be merged later into "ad[sv]?".
        let matchesForIndex = ruleInfo.matches[edit.index];

        if (matchesForIndex)
        {
          matchesForIndex.push(match);
        }
        else
        {
          matchesForIndex = [match];
          ruleInfo.matches[edit.index] = matchesForIndex;
        }

        // Keep track of the best set of matches. We later sort by this to
        // get best results.
        if (!ruleInfo.bestMatches ||
            matchesForIndex.length > ruleInfo.bestMatches.length)
          ruleInfo.bestMatches = matchesForIndex;
      }
    }
  }
}
|  | 658 | 
/**
 * Merge closely matching rules based on their URL filters.
 *
 * Uses the match information recorded by findMatchesForRuleByURLFilter to
 * fold closely matching URL filters into one rule. Single character edits
 * become a character class and/or an optional character (e.g. "ads", "adv",
 * and "ad" become "ad[sv]?"), while a single multiple character edit becomes
 * an optional group (e.g. "ad" and "adserver" become "ad(server)?").
 *
 * Rules merged into another rule are flagged with "merged"; rules that
 * absorbed others are flagged with "mergedInto".
 *
 * @param {Array.<object>} rulesInfo Rule info objects to merge in place
 */
function mergeCandidateRulesByURLFilter(rulesInfo)
{
  // Filter out rules that have no matches at all.
  let candidateRulesInfo = rulesInfo.filter(ruleInfo =>
  {
    // Fixed: terminate the statement explicitly rather than relying on ASI.
    return ruleInfo.bestMatches || ruleInfo.multiEditMatch;
  });

  // For best results, we have to sort the candidates by the largest set of
  // matches.
  //
  // For example, we want "ads", "bds", "adv", "bdv", "adx", and "bdx" to
  // generate "ad[svx]" and "bd[svx]" (2 rules), not "[ab]ds", "[ab]dv", and
  // "[ab]dx" (3 rules).
  candidateRulesInfo.sort((ruleInfo1, ruleInfo2) =>
  {
    let weight1 = ruleInfo1.bestMatches ? ruleInfo1.bestMatches.length :
                  ruleInfo1.multiEditMatch ? 1 : 0;
    let weight2 = ruleInfo2.bestMatches ? ruleInfo2.bestMatches.length :
                  ruleInfo2.multiEditMatch ? 1 : 0;

    return weight2 - weight1;
  });

  for (let ruleInfo of candidateRulesInfo)
  {
    let rule = ruleInfo.rule;

    // If this rule has already been merged into another rule, we skip it.
    if (ruleInfo.merged)
      continue;

    // Find the best set of rules to group, which is simply the largest set.
    // (The accumulator is named bestSoFar so it doesn't shadow "best".)
    let best = (ruleInfo.matches || []).reduce((bestSoFar, matchesForIndex) =>
    {
      matchesForIndex = (matchesForIndex || []).filter(match =>
      {
        // Filter out rules that have either already been merged into other
        // rules or have had other rules merged into them.
        return !rulesInfo[match.index].merged &&
               !rulesInfo[match.index].mergedInto;
      });

      return matchesForIndex.length > bestSoFar.length ? matchesForIndex :
                                                         bestSoFar;
    },
    []);

    let multiEdit = false;

    // If we couldn't find a single rule to merge with, let's see if we have a
    // multiple character edit. e.g. we could merge "ad" and "adserver" into
    // "ad(server)?".
    if (best.length == 0 && ruleInfo.multiEditMatch &&
        !rulesInfo[ruleInfo.multiEditMatch.index].merged &&
        !rulesInfo[ruleInfo.multiEditMatch.index].mergedInto)
    {
      best = [ruleInfo.multiEditMatch];
      multiEdit = true;
    }

    if (best.length > 0)
    {
      let urlFilter = rule.trigger["url-filter"];

      let editIndex = best[0].edit.index;

      if (!multiEdit)
      {
        // Merge all the matching rules into this one.

        let characters = [];
        let quantifier = "";

        for (let match of best)
        {
          if (match.edit.type == "delete")
          {
            quantifier = "?";
          }
          else
          {
            let character = rulesInfo[match.index].rule
                            .trigger["url-filter"][editIndex];
            characters.push(character);
          }

          // Mark the target rule as merged so other rules don't try to merge
          // it again.
          rulesInfo[match.index].merged = true;
        }

        urlFilter = urlFilter.substring(0, editIndex + 1) + quantifier +
                    urlFilter.substring(editIndex + 1);
        if (characters.length > 0)
        {
          urlFilter = urlFilter.substring(0, editIndex) + "[" +
                      urlFilter[editIndex] + characters.join("") + "]" +
                      urlFilter.substring(editIndex + 1);
        }
      }
      else
      {
        let editEndIndex = best[0].edit.endIndex;

        // Mark the target rule as merged so other rules don't try to merge it
        // again.
        rulesInfo[best[0].index].merged = true;

        urlFilter = urlFilter.substring(0, editIndex) + "(" +
                    urlFilter.substring(editIndex, editEndIndex) + ")?" +
                    urlFilter.substring(editEndIndex);
      }

      rule.trigger["url-filter"] = urlFilter;

      // Mark this rule as one that has had other rules merged into it.
      ruleInfo.mergedInto = true;
    }
  }
}
|  | 779 | 
/**
 * Merge rules by their URL filters: first collect match information for every
 * rule, then merge the candidates that closely match each other.
 *
 * @param {Array.<object>} rulesInfo Rule info objects to merge in place
 * @param {boolean} exhaustive Whether to compare every pair of rules rather
 *                             than only those within a heuristic range
 *
 * @returns {Promise} A promise resolved once merging is done
 */
function mergeRulesByURLFilter(rulesInfo, exhaustive)
{
  let findAllMatches = () =>
    async(rulesInfo.map((ruleInfo, index) =>
      () => findMatchesForRuleByURLFilter(rulesInfo, index, exhaustive)));

  return async(findAllMatches)
    .then(() => mergeCandidateRulesByURLFilter(rulesInfo));
}
|  | 787 | 
/**
 * Merge a group of rules on an array-valued property.
 *
 * The first rule absorbs the union of the property's values across the whole
 * group (flagged "mergedInto"); every other rule is flagged "merged". Note
 * that the first element is removed from rulesInfo via shift.
 *
 * @param {Array.<object>} rulesInfo Rule info objects to merge
 * @param {string} propertyType Either "trigger" or "action"
 * @param {string} property Name of the array property to merge on
 */
function mergeRulesByArrayProperty(rulesInfo, propertyType, property)
{
  if (rulesInfo.length <= 1)
    return;

  let targetInfo = rulesInfo.shift();
  let mergedValues = new Set(targetInfo.rule[propertyType][property]);

  for (let otherInfo of rulesInfo)
  {
    let values = otherInfo.rule[propertyType][property];
    if (values)
    {
      for (let value of values)
        mergedValues.add(value);
    }

    otherInfo.merged = true;
  }

  if (mergedValues.size > 0)
    targetInfo.rule[propertyType][property] = Array.from(mergedValues);

  targetInfo.mergedInto = true;
}
|  | 812 | 
/**
 * Group rules that are identical except for one property.
 *
 * Each rule is serialized with the given property removed; rules producing
 * the same serialization agree on everything else and can therefore be
 * merged on that property.
 *
 * @param {Array.<object>} rulesInfo Rule info objects to group
 * @param {string} propertyType Either "trigger" or "action"
 * @param {string} property Name of the property to ignore while grouping
 *
 * @returns {Map.<string,Array.<object>>} Rule info objects keyed by the
 *                                        serialized rest of the rule
 */
function groupRulesByMergeableProperty(rulesInfo, propertyType, property)
{
  let groups = new Map();

  for (let ruleInfo of rulesInfo)
  {
    // Shallow-copy trigger and action so the property can be deleted from
    // the copy without touching the original rule.
    let strippedRule = {
      trigger: Object.assign({}, ruleInfo.rule.trigger),
      action: Object.assign({}, ruleInfo.rule.action)
    };
    delete strippedRule[propertyType][property];

    let key = JSON.stringify(strippedRule);

    if (groups.has(key))
      groups.get(key).push(ruleInfo);
    else
      groups.set(key, [ruleInfo]);
  }

  return groups;
}
|  | 838 | 
/**
 * Merge rules with the goal of reducing the total number of rules.
 *
 * Rules are first merged by their "url-filter" trigger property (e.g. "ads"
 * and "adv" become "ad[sv]"), then by the "resource-type" and "if-domain"
 * trigger properties (rules identical except for one of these arrays are
 * collapsed into a single rule carrying the union of the values).
 *
 * @param {Array.<object>} rules The rules to merge
 * @param {boolean} exhaustive Whether to compare every pair of rules rather
 *                             than only those within a heuristic range
 *
 * @returns {Promise.<Array.<object>>} A promise for the merged list of rules
 */
function mergeRules(rules, exhaustive)
{
  let rulesInfo = rules.map(rule => ({rule}));

  let arrayPropertiesToMergeBy = ["resource-type", "if-domain"];

  return async(() =>
  {
    // Only rules that agree on everything but their URL filter can be merged
    // by URL filter, hence the grouping step.
    let map = groupRulesByMergeableProperty(rulesInfo, "trigger", "url-filter");
    return async(Array.from(map.values()).map(mergeableRulesInfo => () =>
      eliminateRedundantRulesByURLFilter(mergeableRulesInfo, exhaustive)
      .then(rulesInfo => mergeRulesByURLFilter(rulesInfo, exhaustive))
    ))
    .then(() =>
    {
      // Filter out rules that are redundant or have been merged into other
      // rules.
      rulesInfo = rulesInfo.filter(ruleInfo => !ruleInfo.redundant &&
                                               !ruleInfo.merged);
    });
  })
  .then(() => async(arrayPropertiesToMergeBy.map(arrayProperty => () =>
  {
    // Same idea for each array property: group rules that differ only in
    // that property, then merge each group into one rule.
    let map = groupRulesByMergeableProperty(rulesInfo, "trigger",
                                            arrayProperty);
    return async(Array.from(map.values()).map(mergeableRulesInfo => () =>
      mergeRulesByArrayProperty(mergeableRulesInfo, "trigger", arrayProperty)
    ))
    .then(() =>
    {
      rulesInfo = rulesInfo.filter(ruleInfo => !ruleInfo.merged);
    });
  })))
  .then(() => rulesInfo.map(ruleInfo => ruleInfo.rule));
}
|  | 874 | 
| 369 let ContentBlockerList = | 875 let ContentBlockerList = | 
| 370 /** | 876 /** | 
| 371  * Create a new Adblock Plus filter to content blocker list converter | 877  * Create a new Adblock Plus filter to content blocker list converter | 
| 372  * | 878  * | 
|  | 879  * @param {object} options Options for content blocker list generation | 
|  | 880  * | 
| 373  * @constructor | 881  * @constructor | 
| 374  */ | 882  */ | 
| 375 exports.ContentBlockerList = function () | 883 exports.ContentBlockerList = function(options) | 
| 376 { | 884 { | 
|  | 885   const defaultOptions = { | 
|  | 886     merge: "auto" | 
|  | 887   }; | 
|  | 888 | 
|  | 889   this.options = Object.assign({}, defaultOptions, options); | 
|  | 890 | 
| 377   this.requestFilters = []; | 891   this.requestFilters = []; | 
| 378   this.requestExceptions = []; | 892   this.requestExceptions = []; | 
| 379   this.elemhideFilters = []; | 893   this.elemhideFilters = []; | 
| 380   this.elemhideExceptions =  []; | 894   this.elemhideExceptions =  []; | 
| 381   this.elemhideSelectorExceptions = new Map(); | 895   this.elemhideSelectorExceptions = new Map(); | 
| 382 }; | 896 }; | 
| 383 | 897 | 
| 384 /** | 898 /** | 
| 385  * Add Adblock Plus filter to be converted | 899  * Add Adblock Plus filter to be converted | 
| 386  * | 900  * | 
| (...skipping 27 matching lines...) Expand all  Loading... | 
| 414     let domains = this.elemhideSelectorExceptions[filter.selector]; | 928     let domains = this.elemhideSelectorExceptions[filter.selector]; | 
| 415     if (!domains) | 929     if (!domains) | 
| 416       domains = this.elemhideSelectorExceptions[filter.selector] = []; | 930       domains = this.elemhideSelectorExceptions[filter.selector] = []; | 
| 417 | 931 | 
| 418     parseDomains(filter.domains, domains, []); | 932     parseDomains(filter.domains, domains, []); | 
| 419   } | 933   } | 
| 420 }; | 934 }; | 
| 421 | 935 | 
/**
 * Generate content blocker list for all filters that were added
 *
 * @returns {Promise.<Array.<object>>} A promise for the flattened list of
 *                                     generated content blocker rules
 */
ContentBlockerList.prototype.generateRules = function()
{
  let cssRules = [];
  let cssExceptionRules = [];
  let blockingRules = [];
  let blockingExceptionRules = [];

  // Order matters: exception rules use "ignore-previous-rules" and must come
  // after the rules they except.
  let ruleGroups = [cssRules, cssExceptionRules,
                    blockingRules, blockingExceptionRules];

  // Group element hiding selectors by the domain pattern they apply to, so
  // several selectors can share a single rule.
  let groupedElemhideFilters = new Map();
  for (let filter of this.elemhideFilters)
  {
    let result = convertElemHideFilter(filter, this.elemhideSelectorExceptions);
    if (!result)
      continue;

    if (result.matchDomains.length == 0)
      result.matchDomains = ["^https?://"];

    for (let matchDomain of result.matchDomains)
    {
      let group = groupedElemhideFilters.get(matchDomain) || [];
      group.push(result.selector);
      groupedElemhideFilters.set(matchDomain, group);
    }
  }

  groupedElemhideFilters.forEach((selectors, matchDomain) =>
  {
    while (selectors.length)
    {
      // Combine up to selectorLimit selectors into one rule.
      let selector = selectors.splice(0, selectorLimit).join(", ");

      // As of Safari 9.0 element IDs are matched as lowercase. We work around
      // this by converting to the attribute format [id="elementID"]
      selector = convertIDSelectorsToAttributeSelectors(selector);

      cssRules.push({
        trigger: {"url-filter": matchDomain,
                  "url-filter-is-case-sensitive": true},
        action: {type: "css-display-none",
                 selector: selector}
      });
    }
  });

  for (let filter of this.elemhideExceptions)
  {
    convertFilterAddRules(cssExceptionRules, filter,
                          "ignore-previous-rules", false);
  }

  for (let filter of this.requestFilters)
    convertFilterAddRules(blockingRules, filter, "block", true);

  for (let filter of this.requestExceptions)
  {
    convertFilterAddRules(blockingExceptionRules, filter,
                          "ignore-previous-rules", true);
  }

  // Process the groups one at a time (asynchronously, via async) so merging
  // a large rule set doesn't block for too long.
  return async(ruleGroups.map((group, index) => () =>
  {
    // Returns the final flattened rule list once the last group has been
    // processed; for earlier groups it returns undefined.
    let next = () =>
    {
      if (index == ruleGroups.length - 1)
        return ruleGroups.reduce((all, rules) => all.concat(rules), []);
    };

    ruleGroups[index] = ruleGroups[index].filter(rule => !hasNonASCI(rule));

    // Merge when requested ("all"), or with "auto" only once the total rule
    // count exceeds 50000 — presumably to stay within the content blocker's
    // rule count limit (TODO confirm). Note the total is re-counted per
    // group, so merging an earlier group can spare later groups.
    if (this.options.merge == "all" ||
        (this.options.merge == "auto" &&
         ruleGroups.reduce((n, group) => n + group.length, 0) > 50000))
    {
      return mergeRules(ruleGroups[index], this.options.merge == "all")
      .then(rules =>
      {
        ruleGroups[index] = rules;
        return next();
      });
    }

    return next();
  }));
};
| OLD | NEW | 
|---|