Diffstat (limited to 'src')
-rw-r--r-- | src/mongo/db/matcher/expression_algo_test.cpp | 8
-rw-r--r-- | src/mongo/db/query/canonical_query_encoder.cpp | 9
-rw-r--r-- | src/mongo/db/query/canonical_query_encoder.h | 15
-rw-r--r-- | src/mongo/db/query/plan_cache.cpp | 32
-rw-r--r-- | src/mongo/db/query/plan_cache_indexability.cpp | 13
-rw-r--r-- | src/mongo/db/query/plan_cache_indexability.h | 32
-rw-r--r-- | src/mongo/db/query/plan_cache_indexability_test.cpp | 148
-rw-r--r-- | src/mongo/db/query/plan_cache_test.cpp | 63
8 files changed, 215 insertions, 105 deletions
diff --git a/src/mongo/db/matcher/expression_algo_test.cpp b/src/mongo/db/matcher/expression_algo_test.cpp
index 68075fbb396..41bbb9b1799 100644
--- a/src/mongo/db/matcher/expression_algo_test.cpp
+++ b/src/mongo/db/matcher/expression_algo_test.cpp
@@ -177,6 +177,14 @@ TEST(ExpressionAlgoIsSubsetOf, CompareAnd_GT) {
     ASSERT_FALSE(expression::isSubsetOf(filter.get(), query.get()));
 }

+TEST(ExpressionAlgoIsSubsetOf, CompareAnd_SingleField) {
+    ParsedMatchExpression filter("{a: {$gt: 5, $lt: 7}}");
+    ParsedMatchExpression query("{a: {$gt: 5, $lt: 6}}");
+
+    ASSERT_TRUE(expression::isSubsetOf(query.get(), filter.get()));
+    ASSERT_FALSE(expression::isSubsetOf(filter.get(), query.get()));
+}
+
 TEST(ExpressionAlgoIsSubsetOf, CompareOr_LT) {
     ParsedMatchExpression lt5("{a: {$lt: 5}}");
     ParsedMatchExpression eq2OrEq3("{$or: [{a: 2}, {a: 3}]}");
diff --git a/src/mongo/db/query/canonical_query_encoder.cpp b/src/mongo/db/query/canonical_query_encoder.cpp
index 9079f27b44c..b51b3ac7583 100644
--- a/src/mongo/db/query/canonical_query_encoder.cpp
+++ b/src/mongo/db/query/canonical_query_encoder.cpp
@@ -65,15 +65,6 @@ bool isQueryNegatingEqualToNull(const mongo::MatchExpression* tree) {

 namespace {

-// Delimiters for cache key encoding.
-const char kEncodeChildrenBegin = '[';
-const char kEncodeChildrenEnd = ']';
-const char kEncodeChildrenSeparator = ',';
-const char kEncodeCollationSection = '#';
-const char kEncodeProjectionSection = '|';
-const char kEncodeRegexFlagsSeparator = '/';
-const char kEncodeSortSection = '~';
-
 /**
  * Encode user-provided string. Cache key delimiters seen in the
  * user string are escaped with a backslash.
diff --git a/src/mongo/db/query/canonical_query_encoder.h b/src/mongo/db/query/canonical_query_encoder.h
index 19fcc02b8a4..be84fd7b78b 100644
--- a/src/mongo/db/query/canonical_query_encoder.h
+++ b/src/mongo/db/query/canonical_query_encoder.h
@@ -33,6 +33,21 @@

 namespace mongo {

+// Delimiters for canonical query portion of cache key encoding.
+inline constexpr char kEncodeChildrenBegin = '[';
+inline constexpr char kEncodeChildrenEnd = ']';
+inline constexpr char kEncodeChildrenSeparator = ',';
+inline constexpr char kEncodeCollationSection = '#';
+inline constexpr char kEncodeProjectionSection = '|';
+inline constexpr char kEncodeRegexFlagsSeparator = '/';
+inline constexpr char kEncodeSortSection = '~';
+
+// Delimiters for the discriminator portion of the cache key encoding.
+inline constexpr char kEncodeDiscriminatorsBegin = '<';
+inline constexpr char kEncodeDiscriminatorsEnd = '>';
+inline constexpr char kEncodeGlobalDiscriminatorsBegin = '(';
+inline constexpr char kEncodeGlobalDiscriminatorsEnd = ')';
+
 /**
  * Returns true if the query predicate involves a negation of an EQ, LTE, or GTE comparison to
  * 'null'.
diff --git a/src/mongo/db/query/plan_cache.cpp b/src/mongo/db/query/plan_cache.cpp
index 23d939df000..77bc3fb0898 100644
--- a/src/mongo/db/query/plan_cache.cpp
+++ b/src/mongo/db/query/plan_cache.cpp
@@ -62,10 +62,6 @@ namespace {
 ServerStatusMetricField<Counter64> totalPlanCacheSizeEstimateBytesMetric(
     "query.planCacheTotalSizeEstimateBytes", &PlanCacheEntry::planCacheTotalSizeEstimateBytes);

-// Delimiters for cache key encoding.
-const char kEncodeDiscriminatorsBegin = '<';
-const char kEncodeDiscriminatorsEnd = '>';
-
 void encodeIndexabilityForDiscriminators(const MatchExpression* tree,
                                          const IndexToDiscriminatorMap& discriminators,
                                          StringBuilder* keyBuilder) {
@@ -74,12 +70,12 @@ void encodeIndexabilityForDiscriminators(const MatchExpression* tree,
     }
 }

-void encodeIndexability(const MatchExpression* tree,
-                        const PlanCacheIndexabilityState& indexabilityState,
-                        StringBuilder* keyBuilder) {
+void encodeIndexabilityRecursive(const MatchExpression* tree,
+                                 const PlanCacheIndexabilityState& indexabilityState,
+                                 StringBuilder* keyBuilder) {
     if (!tree->path().empty()) {
         const IndexToDiscriminatorMap& discriminators =
-            indexabilityState.getDiscriminators(tree->path());
+            indexabilityState.getPathDiscriminators(tree->path());
         IndexToDiscriminatorMap wildcardDiscriminators =
             indexabilityState.buildWildcardDiscriminators(tree->path());
         if (!discriminators.empty() || !wildcardDiscriminators.empty()) {
@@ -99,8 +95,26 @@ void encodeIndexability(const MatchExpression* tree,
     }

     for (size_t i = 0; i < tree->numChildren(); ++i) {
-        encodeIndexability(tree->getChild(i), indexabilityState, keyBuilder);
+        encodeIndexabilityRecursive(tree->getChild(i), indexabilityState, keyBuilder);
+    }
+}
+
+void encodeIndexability(const MatchExpression* tree,
+                        const PlanCacheIndexabilityState& indexabilityState,
+                        StringBuilder* keyBuilder) {
+    // Before encoding the indexability of the leaf MatchExpressions, apply the global
+    // discriminators to the expression as a whole. This is for cases such as partial indexes which
+    // must discriminate based on the entire query.
+    const auto& globalDiscriminators = indexabilityState.getGlobalDiscriminators();
+    if (!globalDiscriminators.empty()) {
+        *keyBuilder << kEncodeGlobalDiscriminatorsBegin;
+        for (auto&& indexAndDiscriminatorPair : globalDiscriminators) {
+            *keyBuilder << indexAndDiscriminatorPair.second.isMatchCompatibleWithIndex(tree);
+        }
+        *keyBuilder << kEncodeGlobalDiscriminatorsEnd;
     }
+
+    encodeIndexabilityRecursive(tree, indexabilityState, keyBuilder);
 }

 }  // namespace
diff --git a/src/mongo/db/query/plan_cache_indexability.cpp b/src/mongo/db/query/plan_cache_indexability.cpp
index a829be528bc..00b9f0192ff 100644
--- a/src/mongo/db/query/plan_cache_indexability.cpp
+++ b/src/mongo/db/query/plan_cache_indexability.cpp
@@ -78,7 +78,6 @@ IndexabilityDiscriminator getCollatedIndexDiscriminator(const CollatorInterface*
             }
             return true;
         }
-
         // The predicate never compares strings so it is not affected by collation.
         return true;
     };
@@ -103,14 +102,7 @@ void PlanCacheIndexabilityState::processSparseIndex(const std::string& indexName

 void PlanCacheIndexabilityState::processPartialIndex(const std::string& indexName,
                                                      const MatchExpression* filterExpr) {
-    invariant(filterExpr);
-    for (size_t i = 0; i < filterExpr->numChildren(); ++i) {
-        processPartialIndex(indexName, filterExpr->getChild(i));
-    }
-    if (filterExpr->getCategory() != MatchExpression::MatchCategory::kLogical) {
-        _pathDiscriminatorsMap[filterExpr->path()][indexName].addDiscriminator(
-            getPartialIndexDiscriminator(filterExpr));
-    }
+    _globalDiscriminatorMap[indexName].addDiscriminator(getPartialIndexDiscriminator(filterExpr));
 }

 void PlanCacheIndexabilityState::processWildcardIndex(const CoreIndexInfo& cii) {
@@ -133,7 +125,7 @@ namespace {
 const IndexToDiscriminatorMap emptyDiscriminators{};
 }  // namespace

-const IndexToDiscriminatorMap& PlanCacheIndexabilityState::getDiscriminators(
+const IndexToDiscriminatorMap& PlanCacheIndexabilityState::getPathDiscriminators(
     StringData path) const {
     PathDiscriminatorsMap::const_iterator it = _pathDiscriminatorsMap.find(path);
     if (it == _pathDiscriminatorsMap.end()) {
@@ -164,6 +156,7 @@ IndexToDiscriminatorMap PlanCacheIndexabilityState::buildWildcardDiscriminators(
 void PlanCacheIndexabilityState::updateDiscriminators(
     const std::vector<CoreIndexInfo>& indexCores) {
     _pathDiscriminatorsMap = PathDiscriminatorsMap();
+    _globalDiscriminatorMap = IndexToDiscriminatorMap();
     _wildcardIndexDiscriminators.clear();

     for (const auto& idx : indexCores) {
diff --git a/src/mongo/db/query/plan_cache_indexability.h b/src/mongo/db/query/plan_cache_indexability.h
index ccb150a1523..bd74b14a57e 100644
--- a/src/mongo/db/query/plan_cache_indexability.h
+++ b/src/mongo/db/query/plan_cache_indexability.h
@@ -46,6 +46,7 @@ struct CoreIndexInfo;
 using IndexabilityDiscriminator = stdx::function<bool(const MatchExpression* me)>;
 using IndexabilityDiscriminators = std::vector<IndexabilityDiscriminator>;
 using IndexToDiscriminatorMap = StringMap<CompositeIndexabilityDiscriminator>;
+using PathDiscriminatorsMap = StringMap<IndexToDiscriminatorMap>;

 /**
  * CompositeIndexabilityDiscriminator holds all indexability discriminators for a particular path,
@@ -76,9 +77,14 @@ private:
 };

 /**
- * PlanCacheIndexabilityState holds a set of "indexability discriminators" for certain paths.
- * An indexability discriminator is a binary predicate function, used to classify match
- * expressions based on the data values in the expression.
+ * PlanCacheIndexabilityState holds a set of "indexability discriminators. An indexability
+ * discriminator is a binary predicate function, used to classify match expressions based on the
+ * data values in the expression.
+ *
+ * These discriminators are used to distinguish between queries of a similar shape but not the same
+ * candidate indexes. So each discriminator typically represents a decision like "is this index
+ * valid?" or "does this piece of the query disqualify it from using this index?". The output of
+ * these decisions is included in the plan cache key.
  */
 class PlanCacheIndexabilityState {
     PlanCacheIndexabilityState(const PlanCacheIndexabilityState&) = delete;
@@ -94,7 +100,15 @@ public:
      * The object returned by reference is valid until the next call to updateDiscriminators() or
     * until destruction of 'this', whichever is first.
     */
-    const IndexToDiscriminatorMap& getDiscriminators(StringData path) const;
+    const IndexToDiscriminatorMap& getPathDiscriminators(StringData path) const;
+
+    /**
+     * Returns a map of index name to discriminator set. These discriminators are not
+     * associated with a particular path of a query and apply to the entire MatchExpression.
+     */
+    const IndexToDiscriminatorMap& getGlobalDiscriminators() const {
+        return _globalDiscriminatorMap;
+    }

     /**
      * Construct an IndexToDiscriminator map for the given path, only for the wildcard indexes
@@ -108,8 +122,6 @@ public:
     void updateDiscriminators(const std::vector<CoreIndexInfo>& indexCores);

 private:
-    using PathDiscriminatorsMap = StringMap<IndexToDiscriminatorMap>;
-
     /**
      * A $** index may index an infinite number of fields. We cannot just store a discriminator for
      * every possible field that it indexes, so we have to maintain some special context about the
@@ -141,8 +153,8 @@ private:
     void processSparseIndex(const std::string& indexName, const BSONObj& keyPattern);

     /**
-     * Adds partial index discriminators for the partial index with the given filter expression
-     * to the discriminators for that index in '_pathDiscriminatorsMap'.
+     * Adds a global discriminator for the partial index with the given filter expression
+     * to the discriminators for that index in '_globalDiscriminatorMap'.
      *
      * A partial index discriminator distinguishes expressions that match a given partial index
      * predicate from expressions that don't match the partial index predicate. For example,
@@ -173,6 +185,10 @@ private:
     // PathDiscriminatorsMap is a map from field path to index name to IndexabilityDiscriminator.
     PathDiscriminatorsMap _pathDiscriminatorsMap;

+    // Map from index name to global discriminators. These are discriminators which do not apply to
+    // a single path but the entire MatchExpression.
+    IndexToDiscriminatorMap _globalDiscriminatorMap;
+
     std::vector<WildcardIndexDiscriminatorContext> _wildcardIndexDiscriminators;
 };

diff --git a/src/mongo/db/query/plan_cache_indexability_test.cpp b/src/mongo/db/query/plan_cache_indexability_test.cpp
index 6b4b2b367c3..ae464339f7a 100644
--- a/src/mongo/db/query/plan_cache_indexability_test.cpp
+++ b/src/mongo/db/query/plan_cache_indexability_test.cpp
@@ -92,7 +92,7 @@ TEST(PlanCacheIndexabilityTest, SparseIndexSimple) {
                                            nullptr,
                                            nullptr)});

-    auto discriminators = state.getDiscriminators("a");
+    auto discriminators = state.getPathDiscriminators("a");
     ASSERT_EQ(1U, discriminators.size());
     ASSERT(discriminators.find("a_1") != discriminators.end());

@@ -133,7 +133,7 @@ TEST(PlanCacheIndexabilityTest, SparseIndexCompound) {
                                            nullptr)});

     {
-        auto discriminators = state.getDiscriminators("a");
+        auto discriminators = state.getPathDiscriminators("a");
         ASSERT_EQ(1U, discriminators.size());
         ASSERT(discriminators.find("a_1_b_1") != discriminators.end());

@@ -146,7 +146,7 @@ TEST(PlanCacheIndexabilityTest, SparseIndexCompound) {
     }

     {
-        auto discriminators = state.getDiscriminators("b");
+        auto discriminators = state.getPathDiscriminators("b");
         ASSERT_EQ(1U, discriminators.size());
         ASSERT(discriminators.find("a_1_b_1") != discriminators.end());

@@ -179,12 +179,17 @@ TEST(PlanCacheIndexabilityTest, PartialIndexSimple) {
                                            nullptr,
                                            nullptr)});

+    // The partial index is represented as a global discriminator that applies to the entire
+    // incoming MatchExpression.
     {
-        auto discriminators = state.getDiscriminators("f");
-        ASSERT_EQ(1U, discriminators.size());
-        ASSERT(discriminators.find("a_1") != discriminators.end());
+        auto discriminators = state.getPathDiscriminators("f");
+        ASSERT_EQ(0U, discriminators.size());

-        auto disc = discriminators["a_1"];
+        auto globalDiscriminators = state.getGlobalDiscriminators();
+        ASSERT_EQ(1U, globalDiscriminators.size());
+        ASSERT(globalDiscriminators.find("a_1") != globalDiscriminators.end());
+
+        auto disc = globalDiscriminators["a_1"];
         ASSERT_EQ(false,
                   disc.isMatchCompatibleWithIndex(
                       parseMatchExpression(BSON("f" << BSON("$gt" << -5))).get()));
@@ -194,7 +199,7 @@ TEST(PlanCacheIndexabilityTest, PartialIndexSimple) {
     }

     {
-        auto discriminators = state.getDiscriminators("a");
+        auto discriminators = state.getPathDiscriminators("a");
         ASSERT_EQ(1U, discriminators.size());
         ASSERT(discriminators.find("a_1") != discriminators.end());

@@ -228,32 +233,52 @@ TEST(PlanCacheIndexabilityTest, PartialIndexAnd) {
                                            nullptr,
                                            nullptr)});

+    // partial index discriminators are global to the entire query, so an individual path should not
+    // have any discriminators. Also the entire query must be a subset of the partial filter
+    // expression, not just the leaves.
+    auto globalDiscriminators = state.getGlobalDiscriminators();
+    ASSERT(globalDiscriminators.find("a_1") != globalDiscriminators.end());
+    auto globalDisc = globalDiscriminators["a_1"];
+
     {
-        auto discriminators = state.getDiscriminators("f");
-        ASSERT_EQ(1U, discriminators.size());
-        ASSERT(discriminators.find("a_1") != discriminators.end());
+        auto discriminators = state.getPathDiscriminators("f");
+        ASSERT_EQ(0U, discriminators.size());

-        auto disc = discriminators["a_1"];
-        ASSERT_EQ(false,
-                  disc.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 0)).get()));
-        ASSERT_EQ(true,
-                  disc.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 1)).get()));
+        ASSERT_EQ(
+            false,
+            globalDisc.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 0)).get()));
+        ASSERT_EQ(
+            false,
+            globalDisc.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 1)).get()));
     }

     {
-        auto discriminators = state.getDiscriminators("g");
-        ASSERT_EQ(1U, discriminators.size());
-        ASSERT(discriminators.find("a_1") != discriminators.end());
+        auto discriminators = state.getPathDiscriminators("g");
+        ASSERT_EQ(0U, discriminators.size());

-        auto disc = discriminators["a_1"];
+        ASSERT_EQ(
+            false,
+            globalDisc.isMatchCompatibleWithIndex(parseMatchExpression(BSON("g" << 0)).get()));
+        ASSERT_EQ(
+            false,
+            globalDisc.isMatchCompatibleWithIndex(parseMatchExpression(BSON("g" << 1)).get()));
+    }
+
+    {
+        // A match expression which is covered entirely by the partial filter should pass the global
+        // discriminator.
         ASSERT_EQ(false,
-                  disc.isMatchCompatibleWithIndex(parseMatchExpression(BSON("g" << 0)).get()));
+                  globalDisc.isMatchCompatibleWithIndex(
+                      parseMatchExpression(BSON("g" << 1 << "f" << 0)).get()));
         ASSERT_EQ(true,
-                  disc.isMatchCompatibleWithIndex(parseMatchExpression(BSON("g" << 1)).get()));
+                  globalDisc.isMatchCompatibleWithIndex(
+                      parseMatchExpression(BSON("g" << 1 << "f" << 1)).get()));
     }

     {
-        auto discriminators = state.getDiscriminators("a");
+        // The path 'a' will still have a discriminator for the collation (even though it's
+        // defaulted).
+        auto discriminators = state.getPathDiscriminators("a");
         ASSERT_EQ(1U, discriminators.size());
         ASSERT(discriminators.find("a_1") != discriminators.end());

@@ -302,33 +327,44 @@ TEST(PlanCacheIndexabilityTest, MultiplePartialIndexes) {
                                            nullptr,
                                            nullptr)});

-    {
-        auto discriminators = state.getDiscriminators("f");
-        ASSERT_EQ(2U, discriminators.size());
-        ASSERT(discriminators.find("a_1") != discriminators.end());
-        ASSERT(discriminators.find("b_1") != discriminators.end());
+    // partial index discriminators are global to the entire query, so an individual path within the
+    // partial filter should not have any discriminators. Also the entire query must be a subset of
+    // the partial filter expression, not just the leaves.
+    auto globalDiscriminators = state.getGlobalDiscriminators();
+    ASSERT(globalDiscriminators.find("a_1") != globalDiscriminators.end());
+    ASSERT(globalDiscriminators.find("b_1") != globalDiscriminators.end());
+    auto globalDiscA = globalDiscriminators["a_1"];
+    auto globalDiscB = globalDiscriminators["b_1"];

-        auto discA = discriminators["a_1"];
-        auto discB = discriminators["b_1"];
+    {
+        auto discriminators = state.getPathDiscriminators("f");
+        ASSERT_EQ(0U, discriminators.size());

-        ASSERT_EQ(false,
-                  discA.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 0)).get()));
-        ASSERT_EQ(false,
-                  discB.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 0)).get()));
+        ASSERT_EQ(
+            false,
+            globalDiscA.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 0)).get()));
+        ASSERT_EQ(
+            false,
+            globalDiscB.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 0)).get()));

-        ASSERT_EQ(true,
-                  discA.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 1)).get()));
-        ASSERT_EQ(false,
-                  discB.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 1)).get()));
+        ASSERT_EQ(
+            true,
+            globalDiscA.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 1)).get()));
+        ASSERT_EQ(
+            false,
+            globalDiscB.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 1)).get()));

-        ASSERT_EQ(false,
-                  discA.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 2)).get()));
-        ASSERT_EQ(true,
-                  discB.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 2)).get()));
+        ASSERT_EQ(
+            false,
+            globalDiscA.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 2)).get()));
+        ASSERT_EQ(
+            true,
+            globalDiscB.isMatchCompatibleWithIndex(parseMatchExpression(BSON("f" << 2)).get()));
     }

+    // The paths 'a' and 'b' will have one discriminator each to capture the collation of the index.
     {
-        auto discriminators = state.getDiscriminators("a");
+        auto discriminators = state.getPathDiscriminators("a");
         ASSERT_EQ(1U, discriminators.size());
         ASSERT(discriminators.find("a_1") != discriminators.end());

@@ -342,7 +378,7 @@ TEST(PlanCacheIndexabilityTest, MultiplePartialIndexes) {
     }

     {
-        auto discriminators = state.getDiscriminators("b");
+        auto discriminators = state.getPathDiscriminators("b");
         ASSERT_EQ(1U, discriminators.size());
         ASSERT(discriminators.find("b_1") != discriminators.end());

@@ -374,7 +410,7 @@ TEST(PlanCacheIndexabilityTest, IndexNeitherSparseNorPartial) {
                                            BSONObj(),
                                            nullptr,
                                            nullptr)});
-    auto discriminators = state.getDiscriminators("a");
+    auto discriminators = state.getPathDiscriminators("a");
     ASSERT_EQ(1U, discriminators.size());
     ASSERT(discriminators.find("a_1") != discriminators.end());
 }
@@ -399,7 +435,7 @@ TEST(PlanCacheIndexabilityTest, DiscriminatorForCollationIndicatesWhenCollations
     entry.collator = &collator;
     state.updateDiscriminators({entry});

-    auto discriminators = state.getDiscriminators("a");
+    auto discriminators = state.getPathDiscriminators("a");
     ASSERT_EQ(1U, discriminators.size());
     ASSERT(discriminators.find("a_1") != discriminators.end());

@@ -484,11 +520,11 @@ TEST(PlanCacheIndexabilityTest, CompoundIndexCollationDiscriminator) {
                                            nullptr,
                                            nullptr)});

-    auto discriminatorsA = state.getDiscriminators("a");
+    auto discriminatorsA = state.getPathDiscriminators("a");
     ASSERT_EQ(1U, discriminatorsA.size());
     ASSERT(discriminatorsA.find("a_1_b_1") != discriminatorsA.end());

-    auto discriminatorsB = state.getDiscriminators("b");
+    auto discriminatorsB = state.getPathDiscriminators("b");
     ASSERT_EQ(1U, discriminatorsB.size());
     ASSERT(discriminatorsB.find("a_1_b_1") != discriminatorsB.end());
 }
@@ -593,13 +629,15 @@ TEST(PlanCacheIndexabilityTest, WildcardPartialIndexDiscriminator) {
     ASSERT_TRUE(wildcardDiscriminators.isMatchCompatibleWithIndex(
         parseMatchExpression(fromjson("{b: 6}")).get()));

-    // The regular (non-wildcard) set of discriminators for the path "a" should reflect whether a
-    // predicate on "a" is compatible with the partial filter expression.
+    // The global discriminator for the index "indexName" should reflect whether a MatchExpression
+    // is compatible with the partial filter expression.
     {
-        discriminatorsA = state.getDiscriminators("a");
-        auto discriminatorsIt = discriminatorsA.find("indexName");
-        ASSERT(discriminatorsIt != discriminatorsA.end());
-        auto disc = discriminatorsIt->second;
+        discriminatorsA = state.getPathDiscriminators("a");
+        ASSERT(discriminatorsA.find("indexName") == discriminatorsA.end());
+
+        auto globalDisc = state.getGlobalDiscriminators();
+        ASSERT(globalDisc.find("indexName") != globalDisc.end());
+        auto disc = globalDisc["indexName"];

         ASSERT_FALSE(
             disc.isMatchCompatibleWithIndex(parseMatchExpression(fromjson("{a: 0}")).get()));
@@ -614,7 +652,7 @@ TEST(PlanCacheIndexabilityTest, WildcardPartialIndexDiscriminator) {

     // There shouldn't be any regular discriminators associated with path "b".
     {
-        auto&& discriminatorsB = state.getDiscriminators("b");
+        auto&& discriminatorsB = state.getPathDiscriminators("b");
         ASSERT_FALSE(discriminatorsB.count("indexName"));
     }
 }
diff --git a/src/mongo/db/query/plan_cache_test.cpp b/src/mongo/db/query/plan_cache_test.cpp
index fc8db4042fe..4fad5f431bc 100644
--- a/src/mongo/db/query/plan_cache_test.cpp
+++ b/src/mongo/db/query/plan_cache_test.cpp
@@ -1858,6 +1858,41 @@ TEST(PlanCacheTest, ComputeKeyPartialIndex) {
               planCache.computeKey(*cqGtZero));
 }

+TEST(PlanCacheTest, ComputeKeyPartialIndexConjunction) {
+    BSONObj filterObj = fromjson("{f: {$gt: 0, $lt: 10}}");
+    unique_ptr<MatchExpression> filterExpr(parseMatchExpression(filterObj));
+
+    PlanCache planCache;
+    const auto keyPattern = BSON("a" << 1);
+    planCache.notifyOfIndexUpdates(
+        {CoreIndexInfo(keyPattern,
+                       IndexNames::nameToType(IndexNames::findPluginName(keyPattern)),
+                       false,                       // sparse
+                       IndexEntry::Identifier{""},  // name
+                       filterExpr.get())});         // filterExpr
+
+    unique_ptr<CanonicalQuery> satisfySinglePredicate(canonicalize("{f: {$gt: 0}}"));
+    ASSERT_EQ(planCache.computeKey(*satisfySinglePredicate).getUnstablePart(), "(0)");
+
+    unique_ptr<CanonicalQuery> satisfyBothPredicates(canonicalize("{f: {$eq: 5}}"));
+    ASSERT_EQ(planCache.computeKey(*satisfyBothPredicates).getUnstablePart(), "(1)");
+
+    unique_ptr<CanonicalQuery> conjSingleField(canonicalize("{f: {$gt: 2, $lt: 9}}"));
+    ASSERT_EQ(planCache.computeKey(*conjSingleField).getUnstablePart(), "(1)");
+
+    unique_ptr<CanonicalQuery> conjSingleFieldNoMatch(canonicalize("{f: {$gt: 2, $lt: 11}}"));
+    ASSERT_EQ(planCache.computeKey(*conjSingleFieldNoMatch).getUnstablePart(), "(0)");
+
+    // Note that these queries get optimized to a single $in over 'f'.
+    unique_ptr<CanonicalQuery> disjSingleFieldBothSatisfy(
+        canonicalize("{$or: [{f: {$eq: 2}}, {f: {$eq: 3}}]}"));
+    ASSERT_EQ(planCache.computeKey(*disjSingleFieldBothSatisfy).getUnstablePart(), "(1)");
+
+    unique_ptr<CanonicalQuery> disjSingleFieldNotSubset(
+        canonicalize("{$or: [{f: {$eq: 2}}, {f: {$eq: 11}}]}"));
+    ASSERT_EQ(planCache.computeKey(*disjSingleFieldNotSubset).getUnstablePart(), "(0)");
+}
+
 // Query shapes should get the same plan cache key if they have the same collation indexability.
 TEST(PlanCacheTest, ComputeKeyCollationIndex) {
     CollatorInterfaceMock collator(CollatorInterfaceMock::MockType::kReverseString);
@@ -2035,8 +2070,8 @@ TEST(PlanCacheTest, ComputeKeyWildcardDiscriminatesCorrectlyBasedOnPartialFilter
         // The discriminator strings have the format "<xx>". That is, there are two discriminator
         // bits for the "x" predicate, the first pertaining to the partialFilterExpression and the
         // second around applicability to the wildcard index.
-        ASSERT_EQ(compatibleKey.getUnstablePart(), "<11>");
-        ASSERT_EQ(incompatibleKey.getUnstablePart(), "<01>");
+        ASSERT_EQ(compatibleKey.getUnstablePart(), "(1)<1>");
+        ASSERT_EQ(incompatibleKey.getUnstablePart(), "(0)<1>");
     }

     // The partialFilterExpression should lead to a discriminator over field 'x', but not over 'y'.
@@ -2051,8 +2086,8 @@ TEST(PlanCacheTest, ComputeKeyWildcardDiscriminatesCorrectlyBasedOnPartialFilter
         // The discriminator strings have the format "<xx><y>". That is, there are two discriminator
         // bits for the "x" predicate (the first pertaining to the partialFilterExpression, the
         // second around applicability to the wildcard index) and one discriminator bit for "y".
-        ASSERT_EQ(compatibleKey.getUnstablePart(), "<11><1>");
-        ASSERT_EQ(incompatibleKey.getUnstablePart(), "<01><1>");
+        ASSERT_EQ(compatibleKey.getUnstablePart(), "(1)<1><1>");
+        ASSERT_EQ(incompatibleKey.getUnstablePart(), "(0)<1><1>");
     }

     // $eq:null predicates cannot be assigned to a wildcard index. Make sure that this is
@@ -2067,8 +2102,8 @@ TEST(PlanCacheTest, ComputeKeyWildcardDiscriminatesCorrectlyBasedOnPartialFilter
         // The discriminator strings have the format "<xx><y>". That is, there are two discriminator
         // bits for the "x" predicate (the first pertaining to the partialFilterExpression, the
         // second around applicability to the wildcard index) and one discriminator bit for "y".
-        ASSERT_EQ(compatibleKey.getUnstablePart(), "<11><1>");
-        ASSERT_EQ(incompatibleKey.getUnstablePart(), "<11><0>");
+        ASSERT_EQ(compatibleKey.getUnstablePart(), "(1)<1><1>");
+        ASSERT_EQ(incompatibleKey.getUnstablePart(), "(1)<1><0>");
     }

     // Test that the discriminators are correct for an $eq:null predicate on 'x'. This predicate is
@@ -2077,7 +2112,7 @@ TEST(PlanCacheTest, ComputeKeyWildcardDiscriminatesCorrectlyBasedOnPartialFilter
     // result in two "0" bits inside the discriminator string.
     {
         auto key = planCache.computeKey(*canonicalize("{x: {$eq: null}}"));
-        ASSERT_EQ(key.getUnstablePart(), "<00>");
+        ASSERT_EQ(key.getUnstablePart(), "(0)<0>");
     }
 }

@@ -2098,7 +2133,7 @@ TEST(PlanCacheTest, ComputeKeyWildcardDiscriminatesCorrectlyWithPartialFilterAnd
        // discriminator because it is not referenced in the partial filter expression. All
        // predicates are compatible.
        auto key = planCache.computeKey(*canonicalize("{x: {$eq: 1}, y: {$eq: 2}, z: {$eq: 3}}"));
-        ASSERT_EQ(key.getUnstablePart(), "<11><11><1>");
+        ASSERT_EQ(key.getUnstablePart(), "(1)<1><1><1>");
     }

     {
@@ -2106,7 +2141,7 @@ ...
        // compatible with the partial filter expression, leading to one of the 'y' bits being set
        // to zero.
        auto key = planCache.computeKey(*canonicalize("{x: {$eq: 1}, y: {$eq: -2}, z: {$eq: 3}}"));
-        ASSERT_EQ(key.getUnstablePart(), "<11><01><1>");
+        ASSERT_EQ(key.getUnstablePart(), "(0)<1><1><1>");
     }
 }

@@ -2125,14 +2160,14 @@ TEST(PlanCacheTest, ComputeKeyWildcardDiscriminatesCorrectlyWithPartialFilterOnN
         // The discriminators have the format <x><(x.y)(x.y)<y>. All predicates are compatible
         auto key =
             planCache.computeKey(*canonicalize("{x: {$eq: 1}, y: {$eq: 2}, 'x.y': {$eq: 3}}"));
-        ASSERT_EQ(key.getUnstablePart(), "<1><11><1>");
+        ASSERT_EQ(key.getUnstablePart(), "(1)<1><1><1>");
     }

     {
         // Here, the predicate on "x.y" is not compatible with the partial filter expression.
         auto key =
             planCache.computeKey(*canonicalize("{x: {$eq: 1}, y: {$eq: 2}, 'x.y': {$eq: -3}}"));
-        ASSERT_EQ(key.getUnstablePart(), "<1><01><1>");
+        ASSERT_EQ(key.getUnstablePart(), "(0)<1><1><1>");
     }
 }

@@ -2152,21 +2187,21 @@ TEST(PlanCacheTest, ComputeKeyDiscriminatesCorrectlyWithPartialFilterAndWildcard
         // the predicate is compatible with the partial filter expression, whereas the disciminator
         // for 'y' is about compatibility with the wildcard index.
         auto key = planCache.computeKey(*canonicalize("{x: {$eq: 1}, y: {$eq: 2}, z: {$eq: 3}}"));
-        ASSERT_EQ(key.getUnstablePart(), "<1><1>");
+        ASSERT_EQ(key.getUnstablePart(), "(1)<1>");
     }

     {
         // Similar to the previous case, except with an 'x' predicate that is incompatible with the
         // partial filter expression.
         auto key = planCache.computeKey(*canonicalize("{x: {$eq: -1}, y: {$eq: 2}, z: {$eq: 3}}"));
-        ASSERT_EQ(key.getUnstablePart(), "<0><1>");
+        ASSERT_EQ(key.getUnstablePart(), "(0)<1>");
     }

     {
         // Case where the 'y' predicate is not compatible with the wildcard index.
         auto key =
             planCache.computeKey(*canonicalize("{x: {$eq: 1}, y: {$eq: null}, z: {$eq: 3}}"));
-        ASSERT_EQ(key.getUnstablePart(), "<1><0>");
+        ASSERT_EQ(key.getUnstablePart(), "(1)<0>");
     }
 }
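Illustration of the key format asserted in the tests above (not part of the commit): with this change, the indexability ("unstable") suffix of a plan cache key begins with one bit per index-level (global) discriminator, wrapped in '(' and ')', followed by the existing per-path discriminator bits wrapped in '<' and '>'. The sketch below is a minimal stand-alone C++ approximation of that encoding; the function name encodeUnstablePart and its inputs are invented for illustration and are not MongoDB APIs.

#include <iostream>
#include <string>
#include <vector>

// Hypothetical helper mirroring the encoding exercised by the tests above: the
// result of each global (whole-query) discriminator is emitted first, delimited
// by '(' and ')', then one '<...>' group of per-path discriminator bits follows.
std::string encodeUnstablePart(const std::vector<bool>& globalBits,
                               const std::vector<std::vector<bool>>& pathBitGroups) {
    std::string key;
    if (!globalBits.empty()) {
        key += '(';
        for (bool bit : globalBits) {
            key += bit ? '1' : '0';
        }
        key += ')';
    }
    for (const auto& group : pathBitGroups) {
        key += '<';
        for (bool bit : group) {
            key += bit ? '1' : '0';
        }
        key += '>';
    }
    return key;
}

int main() {
    // One partial index whose filter the query satisfies, plus one discriminator
    // bit for each of two paths: prints "(1)<1><1>".
    std::cout << encodeUnstablePart({true}, {{true}, {true}}) << "\n";
    return 0;
}

Under these assumptions, a query that falls outside the partial filter would flip the leading group to "(0)" while leaving the per-path "<...>" groups unchanged, which is exactly the distinction the new global discriminators add to the cache key.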