Diff
Modified: trunk/Source/WebCore/ChangeLog (183817 => 183818)
--- trunk/Source/WebCore/ChangeLog 2015-05-05 17:12:36 UTC (rev 183817)
+++ trunk/Source/WebCore/ChangeLog 2015-05-05 17:27:41 UTC (rev 183818)
@@ -1,3 +1,25 @@
+2015-05-05 Alex Christensen <[email protected]>
+
+ [Content Extensions] Combine NFAs properly and free memory as we compile.
+ https://bugs.webkit.org/show_bug.cgi?id=144485
+
+ Reviewed by Benjamin Poulain.
+
+ This patch correctly combines all regular expressions with a common prefix up to
+ the last quantified term into the same NFA. It also deletes the prefix tree as it
+ creates NFAs, thus reducing the maximum memory used when compiling.
+
+ * contentextensions/CombinedURLFilters.cpp:
+ (WebCore::ContentExtensions::CombinedURLFilters::isEmpty):
+ (WebCore::ContentExtensions::CombinedURLFilters::addPattern):
+ (WebCore::ContentExtensions::generateNFAForSubtree):
+ (WebCore::ContentExtensions::CombinedURLFilters::processNFAs):
+ (WebCore::ContentExtensions::CombinedURLFilters::clear): Deleted.
+ * contentextensions/CombinedURLFilters.h:
+ * contentextensions/ContentExtensionCompiler.cpp:
+ (WebCore::ContentExtensions::compileRuleList):
+ * contentextensions/ContentExtensionsDebugging.h:
+
2015-05-04 Alex Christensen <[email protected]>
[Content Extensions] Use less memory when writing byte code to file
Modified: trunk/Source/WebCore/contentextensions/CombinedURLFilters.cpp (183817 => 183818)
--- trunk/Source/WebCore/contentextensions/CombinedURLFilters.cpp 2015-05-05 17:12:36 UTC (rev 183817)
+++ trunk/Source/WebCore/contentextensions/CombinedURLFilters.cpp 2015-05-05 17:27:41 UTC (rev 183818)
@@ -47,7 +47,6 @@
struct PrefixTreeVertex {
PrefixTreeEdges edges;
ActionList finalActions;
- bool inVariableLengthPrefix { false };
};
#if CONTENT_EXTENSIONS_PERFORMANCE_REPORTING
@@ -116,9 +115,9 @@
{
}
-void CombinedURLFilters::clear()
+bool CombinedURLFilters::isEmpty()
{
- m_prefixTreeRoot = std::make_unique<PrefixTreeVertex>();
+ return m_prefixTreeRoot->edges.isEmpty();
}
void CombinedURLFilters::addPattern(uint64_t actionId, const Vector<Term>& pattern)
@@ -128,13 +127,8 @@
if (pattern.isEmpty())
return;
- Vector<PrefixTreeVertex*, 128> prefixTreeVerticesForPattern;
- prefixTreeVerticesForPattern.reserveInitialCapacity(pattern.size() + 1);
-
// Extend the prefix tree with the new pattern.
- bool hasNewTerm = false;
PrefixTreeVertex* lastPrefixTree = m_prefixTreeRoot.get();
- prefixTreeVerticesForPattern.append(lastPrefixTree);
for (const Term& term : pattern) {
size_t nextEntryIndex = WTF::notFound;
@@ -147,129 +141,125 @@
if (nextEntryIndex != WTF::notFound)
lastPrefixTree = lastPrefixTree->edges[nextEntryIndex].child.get();
else {
- hasNewTerm = true;
-
lastPrefixTree->edges.append(PrefixTreeEdge({term, std::make_unique<PrefixTreeVertex>()}));
lastPrefixTree = lastPrefixTree->edges.last().child.get();
}
- prefixTreeVerticesForPattern.append(lastPrefixTree);
}
- ActionList& actions = prefixTreeVerticesForPattern.last()->finalActions;
+ ActionList& actions = lastPrefixTree->finalActions;
if (actions.find(actionId) == WTF::notFound)
actions.append(actionId);
-
- if (!hasNewTerm)
- return;
-
- bool hasSeenVariableLengthTerms = false;
- for (unsigned i = pattern.size(); i--;) {
- const Term& term = pattern[i];
- hasSeenVariableLengthTerms |= !term.hasFixedLength();
- prefixTreeVerticesForPattern[i + 1]->inVariableLengthPrefix |= hasSeenVariableLengthTerms;
- }
- prefixTreeVerticesForPattern[0]->inVariableLengthPrefix |= hasSeenVariableLengthTerms;
}
-struct ActiveSubtree {
- const PrefixTreeVertex* vertex;
- PrefixTreeEdges::const_iterator iterator;
-};
-
-static void generateNFAForSubtree(NFA& nfa, unsigned rootId, const PrefixTreeVertex& prefixTreeVertex)
+static void generateNFAForSubtree(NFA& nfa, unsigned nfaRootId, PrefixTreeVertex& root)
{
- ASSERT_WITH_MESSAGE(!prefixTreeVertex.inVariableLengthPrefix, "This code assumes the subtrees with variable prefix length have already been handled.");
-
- struct ActiveNFASubtree : ActiveSubtree {
- ActiveNFASubtree(const PrefixTreeVertex* vertex, PrefixTreeEdges::const_iterator iterator, unsigned nodeIndex)
- : ActiveSubtree({ vertex, iterator })
- , lastNodeIndex(nodeIndex)
+ // This recursively traverses the subtree of the prefix tree.
+ // For each edge that has fixed length (no quantifiers like ?, *, or +) it generates the nfa graph,
+ // recurses into children, and deletes any processed leaf nodes.
+ struct ActiveSubtree {
+ ActiveSubtree(PrefixTreeVertex& vertex, unsigned nfaNodeId, unsigned edgeIndex)
+ : vertex(vertex)
+ , nfaNodeId(nfaNodeId)
+ , edgeIndex(edgeIndex)
{
}
- unsigned lastNodeIndex;
+ PrefixTreeVertex& vertex;
+ unsigned nfaNodeId;
+ unsigned edgeIndex;
};
+ Vector<ActiveSubtree> stack;
+ if (!root.edges.isEmpty())
+ stack.append(ActiveSubtree(root, nfaRootId, 0));
+
+ // Generate graphs for each subtree that does not contain any quantifiers.
+ while (!stack.isEmpty()) {
+ PrefixTreeVertex& vertex = stack.last().vertex;
+ const unsigned edgeIndex = stack.last().edgeIndex;
- Vector<ActiveNFASubtree> activeStack;
- activeStack.append(ActiveNFASubtree(&prefixTreeVertex, prefixTreeVertex.edges.begin(), rootId));
-
- while (true) {
- ProcessSubtree:
- for (ActiveNFASubtree& activeSubtree = activeStack.last(); activeSubtree.iterator != activeSubtree.vertex->edges.end(); ++activeSubtree.iterator) {
- if (activeSubtree.iterator->child->inVariableLengthPrefix)
+ if (edgeIndex < vertex.edges.size()) {
+ auto& edge = vertex.edges[edgeIndex];
+
+ // Quantified edges in the subtree will be a part of another NFA.
+ if (!edge.term.hasFixedLength()) {
+ stack.last().edgeIndex++;
continue;
-
- const Term& term = activeSubtree.iterator->term;
- unsigned newEndNodeIndex = term.generateGraph(nfa, activeSubtree.lastNodeIndex, activeSubtree.iterator->child->finalActions);
-
- PrefixTreeVertex* prefixTreeVertex = activeSubtree.iterator->child.get();
- if (!prefixTreeVertex->edges.isEmpty()) {
- activeStack.append(ActiveNFASubtree(prefixTreeVertex, prefixTreeVertex->edges.begin(), newEndNodeIndex));
- goto ProcessSubtree;
}
+
+ unsigned subtreeRootId = edge.term.generateGraph(nfa, stack.last().nfaNodeId, edge.child->finalActions);
+ ASSERT(edge.child.get());
+ stack.append(ActiveSubtree(*edge.child.get(), subtreeRootId, 0));
+ } else {
+ ASSERT(edgeIndex == vertex.edges.size());
+ vertex.edges.removeAllMatching([](PrefixTreeEdge& edge)
+ {
+ return edge.term.isDeletedValue();
+ });
+ stack.removeLast();
+ if (!stack.isEmpty()) {
+ auto& activeSubtree = stack.last();
+ auto& edge = activeSubtree.vertex.edges[stack.last().edgeIndex];
+ if (edge.child->edges.isEmpty())
+ edge.term = Term(Term::DeletedValue); // Mark this leaf for deletion.
+ activeSubtree.edgeIndex++;
+ }
}
-
- activeStack.removeLast();
- if (activeStack.isEmpty())
- break;
- ++activeStack.last().iterator;
}
}
-void CombinedURLFilters::processNFAs(std::function<void(NFA&&)> handler) const
+void CombinedURLFilters::processNFAs(std::function<void(NFA&&)> handler)
{
- Vector<ActiveSubtree> activeStack;
- activeStack.append(ActiveSubtree({ m_prefixTreeRoot.get(), m_prefixTreeRoot->edges.begin() }));
-
+#if CONTENT_EXTENSIONS_STATE_MACHINE_DEBUGGING
+ print();
+#endif
while (true) {
- ProcessSubtree:
- ActiveSubtree& activeSubtree = activeStack.last();
-
- // We go depth first into the subtrees with variable prefix. Find the next subtree.
- for (; activeSubtree.iterator != activeSubtree.vertex->edges.end(); ++activeSubtree.iterator) {
- PrefixTreeVertex* prefixTreeVertex = activeSubtree.iterator->child.get();
- if (prefixTreeVertex->inVariableLengthPrefix) {
- activeStack.append(ActiveSubtree({ prefixTreeVertex, prefixTreeVertex->edges.begin() }));
- goto ProcessSubtree;
- }
+ // Traverse out to a leaf.
+ Vector<PrefixTreeVertex*, 128> stack;
+ PrefixTreeVertex* vertex = m_prefixTreeRoot.get();
+ while (true) {
+ ASSERT(vertex);
+ stack.append(vertex);
+ if (vertex->edges.isEmpty())
+ break;
+ vertex = vertex->edges.last().child.get();
}
-
- // After we reached here, we know that all the subtrees with variable prefixes have been processed,
- // time to generate the NFA for the graph rooted here.
- bool needToGenerate = activeSubtree.vertex->edges.isEmpty() && !activeSubtree.vertex->finalActions.isEmpty();
- if (!needToGenerate) {
- for (const auto& edge : activeSubtree.vertex->edges) {
- if (!edge.child->inVariableLengthPrefix) {
- needToGenerate = true;
- break;
- }
- }
+ if (stack.size() == 1)
+ break; // We're done once we have processed and removed all the edges in the prefix tree.
+
+ // Find the prefix root for this NFA. This is the vertex after the last term with a quantifier if there is one,
+ // or the root if there are no quantifiers left.
+ while (stack.size() > 1) {
+ if (!stack[stack.size() - 2]->edges.last().term.hasFixedLength())
+ break;
+ stack.removeLast();
}
-
- if (needToGenerate) {
- NFA nfa;
-
- unsigned prefixEnd = nfa.root();
-
- for (unsigned i = 0; i < activeStack.size() - 1; ++i) {
- const Term& term = activeStack[i].iterator->term;
- prefixEnd = term.generateGraph(nfa, prefixEnd, activeStack[i].iterator->child->finalActions);
- }
-
- for (const auto& edge : activeSubtree.vertex->edges) {
- if (!edge.child->inVariableLengthPrefix) {
- unsigned newSubtreeStart = edge.term.generateGraph(nfa, prefixEnd, edge.child->finalActions);
- generateNFAForSubtree(nfa, newSubtreeStart, *edge.child);
- }
- }
-
- handler(WTF::move(nfa));
+ ASSERT_WITH_MESSAGE(!stack.isEmpty(), "At least the root should be in the stack");
+
+ // Make an NFA with the subtrees for which this is also the last quantifier (or which have no quantifier).
+ NFA nfa;
+ // Put the prefix into the NFA.
+ unsigned prefixEnd = nfa.root();
+ for (unsigned i = 0; i < stack.size() - 1; ++i) {
+ ASSERT(!stack[i]->edges.isEmpty());
+ const PrefixTreeEdge& edge = stack[i]->edges.last();
+ prefixEnd = edge.term.generateGraph(nfa, prefixEnd, edge.child->finalActions);
}
-
- // We have processed all the subtrees of this level, pop the stack and move on to the next sibling.
- activeStack.removeLast();
- if (activeStack.isEmpty())
- break;
- ++activeStack.last().iterator;
+ // Put the non-quantified vertices in the subtree into the NFA and delete them.
+ ASSERT(stack.last());
+ generateNFAForSubtree(nfa, prefixEnd, *stack.last());
+
+ handler(WTF::move(nfa));
+
+ // Clean up any processed leaf nodes.
+ while (true) {
+ if (stack.size() > 1) {
+ if (stack[stack.size() - 1]->edges.isEmpty()) {
+ stack[stack.size() - 2]->edges.removeLast();
+ stack.removeLast();
+ } else
+ break; // Vertex is not a leaf.
+ } else
+ break; // Leave the empty root.
+ }
}
}
Modified: trunk/Source/WebCore/contentextensions/CombinedURLFilters.h (183817 => 183818)
--- trunk/Source/WebCore/contentextensions/CombinedURLFilters.h 2015-05-05 17:12:36 UTC (rev 183817)
+++ trunk/Source/WebCore/contentextensions/CombinedURLFilters.h 2015-05-05 17:27:41 UTC (rev 183818)
@@ -46,8 +46,8 @@
void addPattern(uint64_t patternId, const Vector<Term>& pattern);
- void processNFAs(std::function<void(NFA&&)> handler) const;
- void clear();
+ void processNFAs(std::function<void(NFA&&)> handler);
+ bool isEmpty();
#if CONTENT_EXTENSIONS_PERFORMANCE_REPORTING
size_t memoryUsed() const;
Modified: trunk/Source/WebCore/contentextensions/ContentExtensionCompiler.cpp (183817 => 183818)
--- trunk/Source/WebCore/contentextensions/ContentExtensionCompiler.cpp 2015-05-05 17:12:36 UTC (rev 183817)
+++ trunk/Source/WebCore/contentextensions/ContentExtensionCompiler.cpp 2015-05-05 17:27:41 UTC (rev 183818)
@@ -198,6 +198,7 @@
#endif
bool firstNFASeen = false;
+ // FIXME: Combine small NFAs to reduce the number of NFAs.
combinedURLFilters.processNFAs([&](NFA&& nfa) {
#if CONTENT_EXTENSIONS_STATE_MACHINE_DEBUGGING
nfa.debugPrintDot();
@@ -244,6 +245,7 @@
firstNFASeen = true;
});
+ ASSERT(combinedURLFilters.isEmpty());
if (!firstNFASeen) {
// Our bytecode interpreter expects to have at least one DFA, so if we haven't seen any
@@ -261,9 +263,6 @@
client.writeBytecode(WTF::move(bytecode));
}
- // FIXME: combinedURLFilters should be cleared incrementally as it is processing NFAs.
- combinedURLFilters.clear();
-
LOG_LARGE_STRUCTURES(universalActionLocations, universalActionLocations.capacity() * sizeof(unsigned));
universalActionLocations.clear();
Modified: trunk/Source/WebCore/contentextensions/ContentExtensionsDebugging.h (183817 => 183818)
--- trunk/Source/WebCore/contentextensions/ContentExtensionsDebugging.h 2015-05-05 17:12:36 UTC (rev 183817)
+++ trunk/Source/WebCore/contentextensions/ContentExtensionsDebugging.h 2015-05-05 17:27:41 UTC (rev 183818)
@@ -36,7 +36,7 @@
#define CONTENT_EXTENSIONS_PAGE_SIZE 16384
#if CONTENT_EXTENSIONS_PERFORMANCE_REPORTING
-#define LOG_LARGE_STRUCTURES(name, size) if (size > 1000000) { dataLogF("NAME: %s SIZE %d", #name, (int)(size)); };
+#define LOG_LARGE_STRUCTURES(name, size) if (size > 1000000) { dataLogF("NAME: %s SIZE %d\n", #name, (int)(size)); };
#else
#define LOG_LARGE_STRUCTURES(name, size)
#endif
Modified: trunk/Tools/ChangeLog (183817 => 183818)
--- trunk/Tools/ChangeLog 2015-05-05 17:12:36 UTC (rev 183817)
+++ trunk/Tools/ChangeLog 2015-05-05 17:27:41 UTC (rev 183818)
@@ -1,3 +1,15 @@
+2015-05-05 Alex Christensen <[email protected]>
+
+ [Content Extensions] Combine NFAs properly and free memory as we compile.
+ https://bugs.webkit.org/show_bug.cgi?id=144485
+
+ Reviewed by Benjamin Poulain.
+
+ * TestWebKitAPI/Tests/WebCore/ContentExtensions.cpp:
+ (TestWebKitAPI::TEST_F):
+ Added tests for correctly splitting up NFAs with unquantified terms after quantified terms.
+ Added tests for deep NFAs.
+
2015-05-04 Alex Christensen <[email protected]>
[Content Extensions] Use less memory when writing byte code to file
Modified: trunk/Tools/TestWebKitAPI/Tests/WebCore/ContentExtensions.cpp (183817 => 183818)
--- trunk/Tools/TestWebKitAPI/Tests/WebCore/ContentExtensions.cpp 2015-05-05 17:12:36 UTC (rev 183817)
+++ trunk/Tools/TestWebKitAPI/Tests/WebCore/ContentExtensions.cpp 2015-05-05 17:27:41 UTC (rev 183818)
@@ -505,10 +505,9 @@
testRequest(backend, {URL(URL(), "http://webkit.org"), URL(URL(), "http://not_webkit.org"), ResourceType::Image}, { ContentExtensions::ActionType::BlockCookies, ContentExtensions::ActionType::BlockLoad });
}
-TEST_F(ContentExtensionTest, MultiDFA)
+TEST_F(ContentExtensionTest, WideNFA)
{
// Make an NFA with about 1400 nodes.
- // FIXME: This does not make multiple DFAs anymore. Add a test that does.
StringBuilder ruleList;
ruleList.append('[');
for (char c1 = 'A'; c1 <= 'Z'; ++c1) {
@@ -543,6 +542,37 @@
testRequest(backend, mainDocumentRequest("http://webkit.org/"), { });
}
+TEST_F(ContentExtensionTest, DeepNFA)
+{
+ const unsigned size = 100000;
+
+ ContentExtensions::CombinedURLFilters combinedURLFilters;
+ ContentExtensions::URLFilterParser parser(combinedURLFilters);
+
+ // FIXME: DFAToNFA::convert takes way too long on these deep NFAs. We should optimize for that case.
+
+ StringBuilder lotsOfAs;
+ for (unsigned i = 0; i < size; ++i)
+ lotsOfAs.append('A');
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern(lotsOfAs.toString().utf8().data(), false, 0));
+
+ // FIXME: Yarr ought to be able to handle 2MB regular expressions.
+ StringBuilder tooManyAs;
+ for (unsigned i = 0; i < size * 20; ++i)
+ tooManyAs.append('A');
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::YarrError, parser.addPattern(tooManyAs.toString().utf8().data(), false, 0));
+
+ StringBuilder nestedGroups;
+ for (unsigned i = 0; i < size; ++i)
+ nestedGroups.append('(');
+ for (unsigned i = 0; i < size; ++i)
+ nestedGroups.append("B)");
+ // FIXME: Add nestedGroups. Right now it also takes too long. It should be optimized.
+
+ // This should not crash and not timeout.
+ EXPECT_EQ(1ul, createNFAs(combinedURLFilters).size());
+}
+
void checkCompilerError(const char* json, ContentExtensions::ContentExtensionError expectedError)
{
WebCore::ContentExtensions::CompiledContentExtensionData extensionData;
@@ -626,9 +656,9 @@
TEST_F(ContentExtensionTest, StrictPrefixSeparatedMachines2)
{
auto backend = makeBackend("[{\"action\":{\"type\":\"block\"},\"trigger\":{\"url-filter\":\"^foo\"}},"
- "{\"action\":{\"type\":\"block\"},\"trigger\":{\"url-filter\":\"^.*[a-c]+bar\"}},"
- "{\"action\":{\"type\":\"block\"},\"trigger\":{\"url-filter\":\"^webkit:\"}},"
- "{\"action\":{\"type\":\"block\"},\"trigger\":{\"url-filter\":\"[a-c]+b+oom\"}}]");
+ "{\"action\":{\"type\":\"block\"},\"trigger\":{\"url-filter\":\"^.*[a-c]+bar\"}},"
+ "{\"action\":{\"type\":\"block\"},\"trigger\":{\"url-filter\":\"^webkit:\"}},"
+ "{\"action\":{\"type\":\"block\"},\"trigger\":{\"url-filter\":\"[a-c]+b+oom\"}}]");
testRequest(backend, mainDocumentRequest("http://webkit.org/"), { });
testRequest(backend, mainDocumentRequest("foo://webkit.org/"), { ContentExtensions::ActionType::BlockLoad });
@@ -656,6 +686,52 @@
EXPECT_EQ(3ul, createNFAs(combinedURLFilters).size());
}
+TEST_F(ContentExtensionTest, StrictPrefixSeparatedMachines3)
+{
+ auto backend = makeBackend("[{\"action\":{\"type\":\"block\"},\"trigger\":{\"url-filter\":\"A*D\"}},"
+ "{\"action\":{\"type\":\"ignore-previous-rules\"},\"trigger\":{\"url-filter\":\"A*BA+\"}},"
+ "{\"action\":{\"type\":\"block-cookies\"},\"trigger\":{\"url-filter\":\"A*BC\"}}]");
+
+ testRequest(backend, mainDocumentRequest("http://webkit.org/D"), { ContentExtensions::ActionType::BlockLoad });
+ testRequest(backend, mainDocumentRequest("http://webkit.org/AAD"), { ContentExtensions::ActionType::BlockLoad });
+ testRequest(backend, mainDocumentRequest("http://webkit.org/AB"), { });
+ testRequest(backend, mainDocumentRequest("http://webkit.org/ABA"), { }, true);
+ testRequest(backend, mainDocumentRequest("http://webkit.org/ABAD"), { }, true);
+ testRequest(backend, mainDocumentRequest("http://webkit.org/BC"), { ContentExtensions::ActionType::BlockCookies });
+ testRequest(backend, mainDocumentRequest("http://webkit.org/ABC"), { ContentExtensions::ActionType::BlockCookies });
+ testRequest(backend, mainDocumentRequest("http://webkit.org/ABABC"), { ContentExtensions::ActionType::BlockCookies }, true);
+ testRequest(backend, mainDocumentRequest("http://webkit.org/ABABCAD"), { ContentExtensions::ActionType::BlockCookies }, true);
+ testRequest(backend, mainDocumentRequest("http://webkit.org/ABCAD"), { ContentExtensions::ActionType::BlockCookies, ContentExtensions::ActionType::BlockLoad });
+}
+
+TEST_F(ContentExtensionTest, StrictPrefixSeparatedMachines3Partitioning)
+{
+ ContentExtensions::CombinedURLFilters combinedURLFilters;
+ ContentExtensions::URLFilterParser parser(combinedURLFilters);
+
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern("A*D", false, 0));
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern("A*BA+", false, 1));
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern("A*BC", false, 2));
+
+ // "A*D" and "A*BC" can be grouped, "A*BA+" should not.
+ EXPECT_EQ(2ul, createNFAs(combinedURLFilters).size());
+}
+
+TEST_F(ContentExtensionTest, QuantifierInGroup)
+{
+ ContentExtensions::CombinedURLFilters combinedURLFilters;
+ ContentExtensions::URLFilterParser parser(combinedURLFilters);
+
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern("(((A+)B)C)", false, 0));
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern("(((A)B+)C)", false, 1));
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern("(((A)B+)C)D", false, 2));
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern("(((A)B)C+)", false, 3));
+ EXPECT_EQ(ContentExtensions::URLFilterParser::ParseStatus::Ok, parser.addPattern("(((A)B)C)", false, 4));
+
+ // (((A)B+)C) and (((A)B+)C)D should be in the same NFA.
+ EXPECT_EQ(4ul, createNFAs(combinedURLFilters).size());
+}
+
static void testPatternStatus(String pattern, ContentExtensions::URLFilterParser::ParseStatus status)
{
ContentExtensions::CombinedURLFilters combinedURLFilters;