Rietveld Code Review Tool

Side by Side Diff: lib/abp2blocklist.js

Issue 29426594: Issue 3673 - Merge closely matching rules (Closed)
Base URL: https://hg.adblockplus.org/abp2blocklist
Patch Set: Rebase with minor changes (created July 20, 2017, 3:45 p.m.)
OLD | NEW
1 /* 1 /*
2 * This file is part of Adblock Plus <https://adblockplus.org/>, 2 * This file is part of Adblock Plus <https://adblockplus.org/>,
3 * Copyright (C) 2006-2017 eyeo GmbH 3 * Copyright (C) 2006-2017 eyeo GmbH
4 * 4 *
5 * Adblock Plus is free software: you can redistribute it and/or modify 5 * Adblock Plus is free software: you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License version 3 as 6 * it under the terms of the GNU General Public License version 3 as
7 * published by the Free Software Foundation. 7 * published by the Free Software Foundation.
8 * 8 *
9 * Adblock Plus is distributed in the hope that it will be useful, 9 * Adblock Plus is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of 10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
(...skipping 29 matching lines...)
40 const rawRequestTypes = typeMap.XMLHTTPREQUEST | 40 const rawRequestTypes = typeMap.XMLHTTPREQUEST |
41 typeMap.WEBSOCKET | 41 typeMap.WEBSOCKET |
42 typeMap.WEBRTC | 42 typeMap.WEBRTC |
43 typeMap.OBJECT_SUBREQUEST | 43 typeMap.OBJECT_SUBREQUEST |
44 typeMap.PING | 44 typeMap.PING |
45 typeMap.OTHER; 45 typeMap.OTHER;
46 const whitelistableRequestTypes = httpRequestTypes | 46 const whitelistableRequestTypes = httpRequestTypes |
47 typeMap.WEBSOCKET | 47 typeMap.WEBSOCKET |
48 typeMap.WEBRTC; 48 typeMap.WEBRTC;
49 49
50 function callLater(func)
51 {
52 return new Promise(resolve =>
53 {
54 let call = () => resolve(func());
55
56 // If this looks like Node.js, call process.nextTick, otherwise call
57 // setTimeout.
58 if (typeof process != "undefined")
59 process.nextTick(call);
60 else
61 setTimeout(call, 0);
62 });
63 }
64
65 function async(funcs)
66 {
67 if (!Array.isArray(funcs))
68 funcs = Array.from(arguments);
69
70 let lastPause = Date.now();
71
72 return funcs.reduce((promise, next) => promise.then(() =>
73 {
74 // If it has been 100ms or longer since the last call, take a pause. This
75 // keeps the browser from freezing up.
76 let now = Date.now();
77 if (now - lastPause >= 100)
78 {
79 lastPause = now;
80 return callLater(next);
81 }
82
83 return next();
84 }),
85 Promise.resolve());
86 }
87
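
The two helpers above can be exercised with a minimal driver like the following sketch. The work items are invented, and async() and callLater() are module-private, so this is purely illustrative:

    // Hypothetical driver: each entry is a thunk that does a small amount
    // of synchronous work; async() chains the thunks and, via callLater(),
    // yields to the event loop roughly every 100 ms so the process stays
    // responsive while converting large filter lists.
    let thunks = new Array(10000).fill(null).map((x, i) => () => i % 3 == 0);

    async(thunks).then(() => console.log("all work items processed"));
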
50 function parseDomains(domains, included, excluded) 88 function parseDomains(domains, included, excluded)
51 { 89 {
52 for (let domain in domains) 90 for (let domain in domains)
53 { 91 {
54 if (domain != "") 92 if (domain != "")
55 { 93 {
56 let enabled = domains[domain]; 94 let enabled = domains[domain];
57 domain = punycode.toASCII(domain.toLowerCase()); 95 domain = punycode.toASCII(domain.toLowerCase());
58 96
59 if (!enabled) 97 if (!enabled)
(...skipping 547 matching lines...)
607 selector: selector} 645 selector: selector}
608 }; 646 };
609 647
610 if (unlessDomain) 648 if (unlessDomain)
611 rule.trigger["unless-domain"] = unlessDomain; 649 rule.trigger["unless-domain"] = unlessDomain;
612 650
613 rules.push(rule); 651 rules.push(rule);
614 } 652 }
615 } 653 }
616 654
655 /**
656 * Check if two strings are a close match
657 *
658 * This function returns an edit operation, one of "substitute", "delete", and
659 * "insert", along with an index in the source string where the edit must occur
660 * in order to arrive at the target string. If the strings are not a close
661 * match, it returns null.
662 *
663 * Two strings are considered to be a close match if they are one edit
664 * operation apart.
665 *
666 * Deletions or insertions of a contiguous range of characters from one string
667 * into the other, at the same index, are treated as a single edit. For
668 * example, "internal" and "international" are considered to be one edit apart
669 * and therefore a close match.
670 *
671 * A few things to note:
672 *
673 * 1) This function does not care about the format of the input strings. For
674 * example, the caller may pass in regular expressions, where "[ab]" and
675 * "[bc]" could be considered to be a close match, since the order within the
676 * brackets doesn't matter. This function will still return null for this set
677 * of inputs since they are two edits apart.
678 *
679 * 2) To be friendly to calling code that might be passing in regular
680 * expressions, this function will simply return null if it encounters a
681 * special character (e.g. "\", "?", "+", etc.) in the delta. For example,
682 * given "Hello" and "Hello, how are you?", it will return null.
683 *
684 * 3) If the caller does indeed pass in regular expressions, it must make the
685 * important assumption that the parts where two such regular expressions may
686 * differ can always be treated as normal strings. For example,
687 * "^https?://example.com/ads" and "^https?://example.com/adv" differ only in
688 * the last character, therefore the regular expressions can safely be merged
689 * into "^https?://example.com/ad[sv]".
690 *
691 * @param {string} s The source string
692 * @param {string} t The target string
693 *
694 * @returns {object} An object describing the single edit operation that must
695 * occur in the source string in order to arrive at the
696 * target string
697 */
698 function closeMatch(s, t)
699 {
700 let diff = s.length - t.length;
701
702 // If target is longer than source, swap them for the purpose of our
703 // calculation.
704 if (diff < 0)
705 {
706 let tmp = s;
707 s = t;
708 t = tmp;
709 }
710
711 let edit = null;
712
713 let i = 0;
714 let j = 0;
715
716 // Start from the beginning and keep going until we hit a character that
717 // doesn't match.
718 for (; i < s.length; i++)
719 {
720 if (s[i] != t[i])
721 break;
722 }
723
724 // Now do exactly the same from the end, but also stop if we reach the
725 // position where we terminated the previous loop.
726 for (; j < t.length; j++)
727 {
728 if (t.length - j == i || s[s.length - j - 1] != t[t.length - j - 1])
729 break;
730 }
731
732 if (diff == 0)
733 {
734 // If the strings are equal in length and the delta isn't exactly one
735 // character, it's not a close match.
736 if (t.length - j - i != 1)
737 return null;
738 }
739 else if (i != t.length - j)
740 {
741 // For strings of unequal length, if we haven't found a match for every
742 // single character in the shorter string counting from both the beginning
743 // and the end, it's not a close match.
744 return null;
745 }
746
747 for (let k = i; k < s.length - j; k++)
748 {
749 // If the delta contains any special characters, it's not a close match.
750 if (s[k] == "." || s[k] == "+" || s[k] == "$" || s[k] == "?" ||
751 s[k] == "{" || s[k] == "}" || s[k] == "(" || s[k] == ")" ||
752 s[k] == "[" || s[k] == "]" || s[k] == "\\")
753 return null;
754 }
755
756 if (diff == 0)
757 {
758 edit = {type: "substitute", index: i};
759 }
760 else if (diff > 0)
761 {
762 edit = {type: "delete", index: i};
763
764 if (diff > 1)
765 edit.endIndex = s.length - j;
766 }
767 else
768 {
769 edit = {type: "insert", index: i};
770
771 if (diff < -1)
772 edit.endIndex = s.length - j;
773 }
774
775 return edit;
776 }
777
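
A few concrete calls illustrate the contract described above. closeMatch() is module-private, so these are expectations for illustration rather than usable API calls, and the inputs are invented except where taken from the comment:

    // Single-character edits (later mergeable into a character class):
    closeMatch("ads", "adv");   // {type: "substitute", index: 2}
    closeMatch("ads", "ad");    // {type: "delete", index: 2}

    // A contiguous multi-character delta is reported as one edit with an
    // endIndex into the longer string:
    closeMatch("ad", "adserver");              // {type: "insert", index: 2, endIndex: 8}
    closeMatch("internal", "international");   // {type: "insert", index: 7, endIndex: 12}

    // Not close matches:
    closeMatch("[ab]", "[bc]");                  // null: two edits apart
    closeMatch("Hello", "Hello, how are you?");  // null: special characters in the delta
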
778 function eliminateRedundantRulesByURLFilter(rulesInfo, exhaustive)
779 {
780 const heuristicRange = 1000;
781
782 let ol = rulesInfo.length;
783
784 // Throw out obviously redundant rules.
785 return async(rulesInfo.map((ruleInfo, index) => () =>
786 {
787 // If this rule is already marked as redundant, don't bother comparing it
788 // with other rules.
789 if (rulesInfo[index].redundant)
790 return;
791
792 let limit = exhaustive ? rulesInfo.length :
793 Math.min(index + heuristicRange, rulesInfo.length);
794
795 for (let i = index, j = i + 1; j < limit; j++)
796 {
797 if (rulesInfo[j].redundant)
798 continue;
799
800 let source = rulesInfo[i].rule.trigger["url-filter"];
801 let target = rulesInfo[j].rule.trigger["url-filter"];
802
803 if (source.length >= target.length)
804 {
805 // If one URL filter is a substring of the other starting at the
806 // beginning, the other one is clearly redundant.
807 if (source.substring(0, target.length) == target)
808 {
809 rulesInfo[i].redundant = true;
810 break;
811 }
812 }
813 else if (target.substring(0, source.length) == source)
814 {
815 rulesInfo[j].redundant = true;
816 }
817 }
818 }))
819 .then(() => rulesInfo.filter(ruleInfo => !ruleInfo.redundant));
820 }
821
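
As a concrete illustration of the prefix test above (the filters are invented):

    // If one url-filter starts with the other, the longer rule is clearly
    // redundant: the shorter filter already matches those URLs.
    let shorter = "^https?://example\\.com/ads";
    let longer = "^https?://example\\.com/ads/banner/";

    console.log(longer.substring(0, shorter.length) == shorter);  // true, so the longer rule is dropped
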
822 function findMatchesForRuleByURLFilter(rulesInfo, index, exhaustive)
823 {
824 // Closely matching rules are likely to be within a certain range. We only
825 // look for matches within this range by default. If we increase this value,
826 // it can give us more matches and a smaller resulting rule set, but possibly
827 // at a significant performance cost.
828 //
829 // If the exhaustive option is true, we simply ignore this value and look for
830 // matches throughout the rule set.
831 const heuristicRange = 1000;
832
833 let limit = exhaustive ? rulesInfo.length :
834 Math.min(index + heuristicRange, rulesInfo.length);
835
836 for (let i = index, j = i + 1; j < limit; j++)
837 {
838 let source = rulesInfo[i].rule.trigger["url-filter"];
839 let target = rulesInfo[j].rule.trigger["url-filter"];
840
841 let edit = closeMatch(source, target);
842
843 if (edit)
844 {
845 let urlFilter, ruleInfo, match = {edit};
846
847 if (edit.type == "insert")
848 {
849 // Convert the insertion into a deletion and stick it on the target
850 // rule instead. We can only group deletions and substitutions;
851 // therefore insertions must be treated as deletions on the target
852 // rule.
853 urlFilter = target;
854 ruleInfo = rulesInfo[j];
855 match.index = i;
856 edit.type = "delete";
857 }
858 else
859 {
860 urlFilter = source;
861 ruleInfo = rulesInfo[i];
862 match.index = j;
863 }
864
865 // If the edit has an end index, it represents a multiple character
866 // edit.
867 let multiEdit = !!edit.endIndex;
868
869 if (multiEdit)
870 {
871 // We only care about a single multiple character edit because the
872 // number of characters for such a match doesn't matter, we can
873 // only merge with one other rule.
874 if (!ruleInfo.multiEditMatch)
875 ruleInfo.multiEditMatch = match;
876 }
877 else
878 {
879 // For single character edits, multiple rules can be merged into
880 // one. e.g. "ad", "ads", and "adv" can be merged into "ad[sv]?".
881 if (!ruleInfo.matches)
882 ruleInfo.matches = new Array(urlFilter.length);
883
884 // Matches at a particular index. For example, for a source string
885 // "ads", both target strings "ad" (deletion) and "adv"
886 // (substitution) match at index 2, hence they are grouped together
887 // to possibly be merged later into "ad[sv]?".
888 let matchesForIndex = ruleInfo.matches[edit.index];
889
890 if (matchesForIndex)
891 {
892 matchesForIndex.push(match);
893 }
894 else
895 {
896 matchesForIndex = [match];
897 ruleInfo.matches[edit.index] = matchesForIndex;
898 }
899
900 // Keep track of the best set of matches. We later sort by this to
901 // get best results.
902 if (!ruleInfo.bestMatches ||
903 matchesForIndex.length > ruleInfo.bestMatches.length)
904 ruleInfo.bestMatches = matchesForIndex;
905 }
906 }
907 }
908 }
909
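
For illustration, suppose rulesInfo holds, in order, rules with the invented url-filters "ads", "ad" and "adv". The bookkeeping above then records:

    // Both edits land at index 2 of the "ads" rule, so they are grouped
    // under matches[2] and can later be merged into "ad[sv]?":
    //
    //   rulesInfo[0].matches[2] == [
    //     {edit: {type: "delete", index: 2}, index: 1},     // vs. "ad"
    //     {edit: {type: "substitute", index: 2}, index: 2}  // vs. "adv"
    //   ]
    //
    // A multi-character insertion such as "ad" vs. "adserver" is instead
    // converted into a deletion on the longer rule and stored once as
    // multiEditMatch, since such a rule can only merge with one other rule.
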
910 function mergeCandidateRulesByURLFilter(rulesInfo)
911 {
912 // Filter out rules that have no matches at all.
913 let candidateRulesInfo = rulesInfo.filter(ruleInfo =>
914 {
915 return ruleInfo.bestMatches || ruleInfo.multiEditMatch
916 });
917
918 // For best results, we have to sort the candidates by the largest set of
919 // matches.
920 //
921 // For example, we want "ads", "bds", "adv", "bdv", "adx", and "bdx" to
922 // generate "ad[svx]" and "bd[svx]" (2 rules), not "[ab]ds", "[ab]dv", and
923 // "[ab]dx" (3 rules).
924 candidateRulesInfo.sort((ruleInfo1, ruleInfo2) =>
925 {
926 let weight1 = ruleInfo1.bestMatches ? ruleInfo1.bestMatches.length :
927 ruleInfo1.multiEditMatch ? 1 : 0;
928 let weight2 = ruleInfo2.bestMatches ? ruleInfo2.bestMatches.length :
929 ruleInfo2.multiEditMatch ? 1 : 0;
930
931 return weight2 - weight1;
932 });
933
934 for (let ruleInfo of candidateRulesInfo)
935 {
936 let rule = ruleInfo.rule;
937
938 // If this rule has already been merged into another rule, we skip it.
939 if (ruleInfo.merged)
940 continue;
941
942 // Find the best set of rules to group, which is simply the largest set.
943 let best = (ruleInfo.matches || []).reduce((best, matchesForIndex) =>
944 {
945 matchesForIndex = (matchesForIndex || []).filter(match =>
946 {
947 // Filter out rules that have either already been merged into other
948 // rules or have had other rules merged into them.
949 return !rulesInfo[match.index].merged &&
950 !rulesInfo[match.index].mergedInto;
951 });
952
953 return matchesForIndex.length > best.length ? matchesForIndex : best;
954 },
955 []);
956
957 let multiEdit = false;
958
959 // If we couldn't find a single rule to merge with, let's see if we have a
960 // multiple character edit. e.g. we could merge "ad" and "adserver" into
961 // "ad(server)?".
962 if (best.length == 0 && ruleInfo.multiEditMatch &&
963 !rulesInfo[ruleInfo.multiEditMatch.index].merged &&
964 !rulesInfo[ruleInfo.multiEditMatch.index].mergedInto)
965 {
966 best = [ruleInfo.multiEditMatch];
967 multiEdit = true;
968 }
969
970 if (best.length > 0)
971 {
972 let urlFilter = rule.trigger["url-filter"];
973
974 let editIndex = best[0].edit.index;
975
976 if (!multiEdit)
977 {
978 // Merge all the matching rules into this one.
979
980 let characters = [urlFilter[editIndex]];
981 let quantifier = "";
982
983 for (let match of best)
984 {
985 if (match.edit.type == "delete")
986 {
987 quantifier = "?";
988 }
989 else
990 {
991 let character = rulesInfo[match.index].rule
992 .trigger["url-filter"][editIndex];
993
994 // Insert any hyphen at the beginning so it gets interpreted as a
995 // literal hyphen.
996 if (character == "-")
997 characters.unshift(character);
998 else
999 characters.push(character);
1000 }
1001
1002 // Mark the target rule as merged so other rules don't try to merge
1003 // it again.
1004 rulesInfo[match.index].merged = true;
1005 }
1006
1007 urlFilter = urlFilter.substring(0, editIndex + 1) + quantifier +
1008 urlFilter.substring(editIndex + 1);
1009 if (characters.length > 1)
1010 {
1011 urlFilter = urlFilter.substring(0, editIndex) + "[" +
1012 characters.join("") + "]" +
1013 urlFilter.substring(editIndex + 1);
1014 }
1015 }
1016 else
1017 {
1018 let editEndIndex = best[0].edit.endIndex;
1019
1020 // Mark the target rule as merged so other rules don't try to merge it
1021 // again.
1022 rulesInfo[best[0].index].merged = true;
1023
1024 urlFilter = urlFilter.substring(0, editIndex) + "(" +
1025 urlFilter.substring(editIndex, editEndIndex) + ")?" +
1026 urlFilter.substring(editEndIndex);
1027 }
1028
1029 rule.trigger["url-filter"] = urlFilter;
1030
1031 // Mark this rule as one that has had other rules merged into it.
1032 ruleInfo.mergedInto = true;
1033 }
1034 }
1035 }
1036
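
Collecting the examples from the comments above, the merge step produces these shapes:

    // Single-character edits merge into a character class; a deletion
    // among the matches adds a "?" quantifier:
    //   "ads", "adv", "adx"   ->  "ad[svx]"
    //   "ad", "ads", "adv"    ->  "ad[sv]?"
    //
    // A single multi-character edit becomes an optional group:
    //   "ad", "adserver"      ->  "ad(server)?"
    //
    // Sorting candidates by their largest set of matches first is what
    // makes "ads", "bds", "adv", "bdv", "adx" and "bdx" come out as the
    // two rules "ad[svx]" and "bd[svx]" rather than the three rules
    // "[ab]ds", "[ab]dv" and "[ab]dx".
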
1037 function mergeRulesByURLFilter(rulesInfo, exhaustive)
1038 {
1039 return async(rulesInfo.map((ruleInfo, index) => () =>
1040 findMatchesForRuleByURLFilter(rulesInfo, index, exhaustive)
1041 ))
1042 .then(() => mergeCandidateRulesByURLFilter(rulesInfo));
1043 }
1044
1045 function mergeRulesByArrayProperty(rulesInfo, propertyType, property)
1046 {
1047 if (rulesInfo.length <= 1)
1048 return;
1049
1050 let valueSet = new Set(rulesInfo[0].rule[propertyType][property]);
1051
1052 for (let i = 1; i < rulesInfo.length; i++)
1053 {
1054 for (let value of rulesInfo[i].rule[propertyType][property] || [])
1055 valueSet.add(value);
1056
1057 rulesInfo[i].merged = true;
1058 }
1059
1060 if (valueSet.size > 0)
1061 rulesInfo[0].rule[propertyType][property] = Array.from(valueSet);
1062
1063 rulesInfo[0].mergedInto = true;
1064 }
1065
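
A minimal sketch of what this array merge does, using two invented rules that differ only in their "if-domain" lists:

    let rulesInfo = [
      {rule: {trigger: {"url-filter": "^https?://ads\\.",
                        "if-domain": ["*example.com"]},
              action: {type: "block"}}},
      {rule: {trigger: {"url-filter": "^https?://ads\\.",
                        "if-domain": ["*example.org"]},
              action: {type: "block"}}}
    ];

    // After mergeRulesByArrayProperty(rulesInfo, "trigger", "if-domain"),
    // the first rule's "if-domain" is ["*example.com", "*example.org"];
    // rulesInfo[1] is marked merged and later filtered out.
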
1066 function groupRulesByMergeableProperty(rulesInfo, propertyType, property)
1067 {
1068 let mergeableRulesInfoByGroup = new Map();
1069
1070 for (let ruleInfo of rulesInfo)
1071 {
1072 let copy = {
1073 trigger: Object.assign({}, ruleInfo.rule.trigger),
1074 action: Object.assign({}, ruleInfo.rule.action)
1075 };
1076
1077 delete copy[propertyType][property];
1078
1079 let groupKey = JSON.stringify(copy);
1080
1081 let mergeableRulesInfo = mergeableRulesInfoByGroup.get(groupKey);
1082
1083 if (mergeableRulesInfo)
1084 mergeableRulesInfo.push(ruleInfo);
1085 else
1086 mergeableRulesInfoByGroup.set(groupKey, [ruleInfo]);
1087 }
1088
1089 return mergeableRulesInfoByGroup;
1090 }
1091
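
Illustrative only, with invented rules: two rules that differ solely in their "url-filter" produce the same JSON group key once that property is removed, so URL-filter merging is only ever attempted within such a group.

    let rule1 = {trigger: {"url-filter": "^https?://example\\.com/ads",
                           "resource-type": ["image"]},
                 action: {type: "block"}};
    let rule2 = {trigger: {"url-filter": "^https?://example\\.com/adv",
                           "resource-type": ["image"]},
                 action: {type: "block"}};

    for (let rule of [rule1, rule2])
    {
      let copy = {trigger: Object.assign({}, rule.trigger),
                  action: Object.assign({}, rule.action)};
      delete copy.trigger["url-filter"];
      console.log(JSON.stringify(copy));  // identical output for both rules
    }
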
1092 function mergeRules(rules, exhaustive)
1093 {
1094 let rulesInfo = rules.map(rule => ({rule}));
1095
1096 let arrayPropertiesToMergeBy = ["resource-type", "if-domain"];
1097
1098 return async(() =>
1099 {
1100 let map = groupRulesByMergeableProperty(rulesInfo, "trigger", "url-filter");
1101 return async(Array.from(map.values()).map(mergeableRulesInfo => () =>
kzar 2017/07/25 12:18:53: If async always took a sequence as the first argument [...]
Manish Jethani 2017/07/28 09:17:36: That's a good suggestion. If the async function t[...]
1102 eliminateRedundantRulesByURLFilter(mergeableRulesInfo, exhaustive)
1103 .then(rulesInfo => mergeRulesByURLFilter(rulesInfo, exhaustive))
1104 ))
1105 .then(() =>
1106 {
1107 // Filter out rules that are redundant or have been merged into other
1108 // rules.
1109 rulesInfo = rulesInfo.filter(ruleInfo => !ruleInfo.redundant &&
1110 !ruleInfo.merged);
1111 });
1112 })
1113 .then(() => async(arrayPropertiesToMergeBy.map(arrayProperty => () =>
1114 {
1115 let map = groupRulesByMergeableProperty(rulesInfo, "trigger",
1116 arrayProperty);
1117 return async(Array.from(map.values()).map(mergeableRulesInfo => () =>
1118 mergeRulesByArrayProperty(mergeableRulesInfo, "trigger", arrayProperty)
1119 ))
1120 .then(() =>
1121 {
1122 rulesInfo = rulesInfo.filter(ruleInfo => !ruleInfo.merged);
1123 });
1124 })))
1125 .then(() => rulesInfo.map(ruleInfo => ruleInfo.rule));
1126 }
1127
617 let ContentBlockerList = 1128 let ContentBlockerList =
618 /** 1129 /**
619 * Create a new Adblock Plus filter to content blocker list converter 1130 * Create a new Adblock Plus filter to content blocker list converter
620 * 1131 *
1132 * @param {object} options Options for content blocker list generation
1133 *
621 * @constructor 1134 * @constructor
622 */ 1135 */
623 exports.ContentBlockerList = function () 1136 exports.ContentBlockerList = function (options)
624 { 1137 {
1138 const defaultOptions = {
1139 merge: "auto"
1140 };
1141
1142 this.options = Object.assign({}, defaultOptions, options);
1143
625 this.requestFilters = []; 1144 this.requestFilters = [];
626 this.requestExceptions = []; 1145 this.requestExceptions = [];
627 this.elemhideFilters = []; 1146 this.elemhideFilters = [];
628 this.elemhideExceptions = []; 1147 this.elemhideExceptions = [];
629 this.genericblockExceptions = []; 1148 this.genericblockExceptions = [];
630 this.generichideExceptions = []; 1149 this.generichideExceptions = [];
631 this.elemhideSelectorExceptions = new Map(); 1150 this.elemhideSelectorExceptions = new Map();
632 }; 1151 };
633 1152
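
A minimal usage sketch for the new options parameter. The require path is an assumption (the converter lives in lib/, as the command-line script loads it); "auto" is the default and only merges once the generated list grows large, while "all" always merges and compares rules exhaustively:

    const {ContentBlockerList} = require("./lib/abp2blocklist");

    let blockerList = new ContentBlockerList({merge: "all"});
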
634 /** 1153 /**
(...skipping 34 matching lines...)
669 let domains = this.elemhideSelectorExceptions[filter.selector]; 1188 let domains = this.elemhideSelectorExceptions[filter.selector];
670 if (!domains) 1189 if (!domains)
671 domains = this.elemhideSelectorExceptions[filter.selector] = []; 1190 domains = this.elemhideSelectorExceptions[filter.selector] = [];
672 1191
673 parseDomains(filter.domains, domains, []); 1192 parseDomains(filter.domains, domains, []);
674 } 1193 }
675 }; 1194 };
676 1195
677 /** 1196 /**
678 * Generate content blocker list for all filters that were added 1197 * Generate content blocker list for all filters that were added
679 *
680 * @returns {Filter} filter Filter to convert
681 */ 1198 */
682 ContentBlockerList.prototype.generateRules = function(filter) 1199 ContentBlockerList.prototype.generateRules = function()
683 { 1200 {
684 let rules = []; 1201 let cssRules = [];
1202 let cssExceptionRules = [];
1203 let blockingRules = [];
1204 let blockingExceptionRules = [];
1205
1206 let ruleGroups = [cssRules, cssExceptionRules,
1207 blockingRules, blockingExceptionRules];
685 1208
686 let genericSelectors = []; 1209 let genericSelectors = [];
687 let groupedElemhideFilters = new Map(); 1210 let groupedElemhideFilters = new Map();
688 1211
689 for (let filter of this.elemhideFilters) 1212 for (let filter of this.elemhideFilters)
690 { 1213 {
691 let result = convertElemHideFilter(filter, this.elemhideSelectorExceptions); 1214 let result = convertElemHideFilter(filter, this.elemhideSelectorExceptions);
692 if (!result) 1215 if (!result)
693 continue; 1216 continue;
694 1217
(...skipping 26 matching lines...)
721 // --max_old_space_size=4096 1244 // --max_old_space_size=4096
722 let elemhideExceptionDomains = extractFilterDomains(this.elemhideExceptions); 1245 let elemhideExceptionDomains = extractFilterDomains(this.elemhideExceptions);
723 1246
724 let genericSelectorExceptionDomains = 1247 let genericSelectorExceptionDomains =
725 extractFilterDomains(this.generichideExceptions); 1248 extractFilterDomains(this.generichideExceptions);
726 elemhideExceptionDomains.forEach(name => 1249 elemhideExceptionDomains.forEach(name =>
727 { 1250 {
728 genericSelectorExceptionDomains.add(name); 1251 genericSelectorExceptionDomains.add(name);
729 }); 1252 });
730 1253
731 addCSSRules(rules, genericSelectors, "^https?://", 1254 addCSSRules(cssRules, genericSelectors, "^https?://",
732 genericSelectorExceptionDomains); 1255 genericSelectorExceptionDomains);
733 1256
734 groupedElemhideFilters.forEach((selectors, matchDomain) => 1257 groupedElemhideFilters.forEach((selectors, matchDomain) =>
735 { 1258 {
736 addCSSRules(rules, selectors, matchDomain, elemhideExceptionDomains); 1259 addCSSRules(cssRules, selectors, matchDomain, elemhideExceptionDomains);
737 }); 1260 });
738 1261
739 let requestFilterExceptionDomains = []; 1262 let requestFilterExceptionDomains = [];
740 for (let filter of this.genericblockExceptions) 1263 for (let filter of this.genericblockExceptions)
741 { 1264 {
742 let parsed = parseFilterRegexpSource(filter.regexpSource); 1265 let parsed = parseFilterRegexpSource(filter.regexpSource);
743 if (parsed.hostname) 1266 if (parsed.hostname)
744 requestFilterExceptionDomains.push(parsed.hostname); 1267 requestFilterExceptionDomains.push(parsed.hostname);
745 } 1268 }
746 1269
747 for (let filter of this.requestFilters) 1270 for (let filter of this.requestFilters)
748 { 1271 {
749 convertFilterAddRules(rules, filter, "block", true, 1272 convertFilterAddRules(blockingRules, filter, "block", true,
750 requestFilterExceptionDomains); 1273 requestFilterExceptionDomains);
751 } 1274 }
752 1275
753 for (let filter of this.requestExceptions) 1276 for (let filter of this.requestExceptions)
754 convertFilterAddRules(rules, filter, "ignore-previous-rules", true); 1277 convertFilterAddRules(blockingExceptionRules, filter,
kzar 2017/07/25 12:18:53: Nit: Please use braces for this for loop since it [...]
Manish Jethani 2017/07/28 09:17:36: Done.
1278 "ignore-previous-rules", true);
755 1279
756 return rules; 1280 return async(ruleGroups.map((group, index) => () =>
1281 {
1282 let next = () =>
1283 {
1284 if (index == ruleGroups.length - 1)
1285 return ruleGroups.reduce((all, rules) => all.concat(rules), []);
1286 };
1287
1288 if (this.options.merge == "all" ||
1289 (this.options.merge == "auto" &&
1290 ruleGroups.reduce((n, group) => n + group.length, 0) > 50000))
1291 {
1292 return mergeRules(ruleGroups[index], this.options.merge == "all")
1293 .then(rules =>
1294 {
1295 ruleGroups[index] = rules;
1296 return next();
1297 });
1298 }
1299
1300 return next();
1301 }));
757 }; 1302 };
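
Since generateRules() now returns a promise, callers have to adapt. Continuing the sketch above, with an invented filter; Filter is assumed to come from the core filterClasses module, as in the top-level abp2blocklist.js script:

    blockerList.addFilter(Filter.fromText("||example.com/ads/$image"));

    blockerList.generateRules().then(rules =>
    {
      // With {merge: "auto"}, the four rule groups are merged only when
      // they add up to more than 50,000 rules; {merge: "all"} merges
      // regardless and uses the exhaustive (slower, smaller output) mode.
      console.log(JSON.stringify(rules, null, 2));
    });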