
Commit f77bff7

jsteemann authored and MongoDB Bot committed
SERVER-108341 Add missing detach/reattach calls to DocumentSourceLookup::_resolvedIntrospectionPipeline (#39280) (#39356)
GitOrigin-RevId: 8bcc5b675614ba61eea8482f478403599adc200f
1 parent: cf2bd09 · commit: f77bff7

3 files changed: +99, -5 lines

etc/backports_required_for_multiversion_tests.yml

Lines changed: 4 additions & 0 deletions
@@ -615,6 +615,8 @@ last-continuous:
     ticket: SERVER-94731
   - test_file: jstests/sharding/sharded_data_distribution.js
     ticket: SERVER-92419
+  - test_file: jstests/sharding/query/union_with_doubly_nested_lookup.js
+    ticket: SERVER-108341
   suites: null
 last-lts:
   all:
@@ -1282,4 +1284,6 @@ last-lts:
     ticket: SERVER-92419
   - test_file: jstests/replsets/initial_sync_source_has_capped_ttl.js
     ticket: SERVER-104771
+  - test_file: jstests/sharding/query/union_with_doubly_nested_lookup.js
+    ticket: SERVER-108341
   suites: null
jstests/sharding/query/union_with_doubly_nested_lookup.js

Lines changed: 76 additions & 0 deletions
@@ -0,0 +1,76 @@
/**
 * Test that a $unionWith with recursively nested $lookup pipelines that target other shards behaves
 * correctly when the query requires multiple getMore calls to be completed.
 *
 * @tags: [
 *   requires_sharding,
 *   multiversion_incompatible,
 * ]
 */

(function() {
"use strict";

const name = jsTestName();

const st = new ShardingTest({shards: 2, mongos: 1});

function runTest(conn) {
    const db = conn.getDB(name);
    const shardedCollName = `${jsTestName()}_sharded`;
    const unshardedCollName = `${jsTestName()}_unsharded`;

    assert.commandWorked(db.adminCommand({enableSharding: db.getName()}));
    assert.commandWorked(
        db.adminCommand({shardCollection: `${db.getName()}.${shardedCollName}`, key: {_id: 1}}));
    assert.commandWorked(
        db.adminCommand({split: `${db.getName()}.${shardedCollName}`, middle: {_id: 1}}));

    const docs = Array.from(
        {length: 10}, (_, i) => ({_id: 0.2 * i, key: i % 5, arr: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}));
    assert.commandWorked(db[shardedCollName].insert(docs));
    assert.commandWorked(db[unshardedCollName].insert(docs));

    let result = db[unshardedCollName]
                     .aggregate(
                         [
                             {$match: {_id: 0}},
                             {$unwind: "$arr"},
                             {
                                 $unionWith: {
                                     coll: shardedCollName,
                                     pipeline: [{
                                         $lookup: {
                                             from: unshardedCollName,
                                             "let": {localKey: "$key"},
                                             pipeline: [
                                                 {$match: {$expr: {$eq: ["$key", "$$localKey"]}}},
                                                 {
                                                     $lookup: {
                                                         from: unshardedCollName,
                                                         "let": {localKey: "$key"},
                                                         pipeline: [
                                                             {$match: {$expr: {$eq: ["$key", "$$localKey"]}}},
                                                             {$addFields: {computed: {$add: ["$$localKey", 1]}}},
                                                         ],
                                                         as: "nestedMatches",
                                                     }
                                                 },
                                             ],
                                             as: "matches",
                                         },
                                     }]
                                 }
                             }
                         ],
                         // Note: a small batchSize is required here so that we have multiple getMore calls.
                         {batchSize: 2})
                     .toArray();

    assert.eq(20, result.length);
}

runTest(st.s0);

st.stop();
}());
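
(Editorial note.) The assertion of 20 results follows directly from the data above: the $match on {_id: 0} selects a single document from the unsharded collection, and $unwind over its 10-element arr expands it into 10 results, while $unionWith appends all 10 documents from the sharded collection, each annotated with matches and nestedMatches by the nested $lookup stages, giving 10 + 10 = 20. The batchSize of 2 spreads the query over several getMore commands, which is exactly where DocumentSourceLookUp must detach from one OperationContext and reattach to the next, i.e. the code paths patched in document_source_lookup.cpp below.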

src/mongo/db/pipeline/document_source_lookup.cpp

Lines changed: 19 additions & 5 deletions
@@ -1246,9 +1246,13 @@ void DocumentSourceLookUp::detachFromOperationContext() {
         // use Pipeline::detachFromOperationContext() to take care of updating '_fromExpCtx->opCtx'.
         _pipeline->detachFromOperationContext();
         invariant(_fromExpCtx->opCtx == nullptr);
-    } else if (_fromExpCtx) {
+    }
+    if (_fromExpCtx) {
         _fromExpCtx->opCtx = nullptr;
     }
+    if (_resolvedIntrospectionPipeline) {
+        _resolvedIntrospectionPipeline->detachFromOperationContext();
+    }
 }
 
 void DocumentSourceLookUp::reattachToOperationContext(OperationContext* opCtx) {
@@ -1257,9 +1261,13 @@ void DocumentSourceLookUp::reattachToOperationContext(OperationContext* opCtx) {
         // use Pipeline::reattachToOperationContext() to take care of updating '_fromExpCtx->opCtx'.
         _pipeline->reattachToOperationContext(opCtx);
         invariant(_fromExpCtx->opCtx == opCtx);
-    } else if (_fromExpCtx) {
+    }
+    if (_fromExpCtx) {
         _fromExpCtx->opCtx = opCtx;
     }
+    if (_resolvedIntrospectionPipeline) {
+        _resolvedIntrospectionPipeline->reattachToOperationContext(opCtx);
+    }
 }
 
 bool DocumentSourceLookUp::validateOperationContext(const OperationContext* opCtx) const {
@@ -1269,9 +1277,15 @@ bool DocumentSourceLookUp::validateOperationContext(const OperationContext* opCtx) const {
 
     if (_pipeline) {
         const auto& sources = _pipeline->getSources();
-        return std::all_of(sources.begin(), sources.end(), [opCtx](const auto& s) {
-            return s->validateOperationContext(opCtx);
-        });
+        if (!std::all_of(sources.begin(), sources.end(), [opCtx](const auto& s) {
+                return s->validateOperationContext(opCtx);
+            })) {
+            return false;
+        }
+    }
+    if (_resolvedIntrospectionPipeline &&
+        !_resolvedIntrospectionPipeline->validateOperationContext(opCtx)) {
+        return false;
     }
 
     return true;

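(Editorial addendum.) To show the intent of the change in isolation, below is a minimal standalone C++ sketch of the pattern the fix enforces. The types OperationContext, Pipeline, and LookUpStage here are simplified stand-ins written for this note, not the real MongoDB classes; the point is only that every owned member caching a raw pointer to the current operation, including the resolved introspection pipeline, must be covered by detach, reattach, and validate.

// Minimal standalone sketch (simplified stand-ins, not the actual MongoDB classes).
#include <cassert>
#include <memory>

struct OperationContext {};  // placeholder for the per-operation state object

// Stand-in for a sub-pipeline that caches a raw pointer to the current operation.
struct Pipeline {
    const OperationContext* opCtx = nullptr;

    void detachFromOperationContext() { opCtx = nullptr; }
    void reattachToOperationContext(const OperationContext* newOpCtx) { opCtx = newOpCtx; }
    bool validateOperationContext(const OperationContext* expected) const {
        return opCtx == expected;
    }
};

// Stand-in for a $lookup stage that owns both an execution pipeline and a
// resolved introspection pipeline; both cache the operation context.
struct LookUpStage {
    std::unique_ptr<Pipeline> pipeline;
    std::unique_ptr<Pipeline> resolvedIntrospectionPipeline;

    void detachFromOperationContext() {
        if (pipeline) {
            pipeline->detachFromOperationContext();
        }
        // Before the fix, the introspection pipeline was skipped here and kept
        // pointing at the previous operation's context.
        if (resolvedIntrospectionPipeline) {
            resolvedIntrospectionPipeline->detachFromOperationContext();
        }
    }

    void reattachToOperationContext(const OperationContext* opCtx) {
        if (pipeline) {
            pipeline->reattachToOperationContext(opCtx);
        }
        if (resolvedIntrospectionPipeline) {
            resolvedIntrospectionPipeline->reattachToOperationContext(opCtx);
        }
    }

    bool validateOperationContext(const OperationContext* opCtx) const {
        if (pipeline && !pipeline->validateOperationContext(opCtx)) {
            return false;
        }
        if (resolvedIntrospectionPipeline &&
            !resolvedIntrospectionPipeline->validateOperationContext(opCtx)) {
            return false;
        }
        return true;
    }
};

int main() {
    LookUpStage stage;
    stage.pipeline = std::make_unique<Pipeline>();
    stage.resolvedIntrospectionPipeline = std::make_unique<Pipeline>();

    OperationContext aggregateOp;  // the initial aggregate command
    stage.reattachToOperationContext(&aggregateOp);
    assert(stage.validateOperationContext(&aggregateOp));

    stage.detachFromOperationContext();  // cursor is stashed between operations

    OperationContext getMoreOp;  // a later getMore runs under a new context
    stage.reattachToOperationContext(&getMoreOp);
    assert(stage.validateOperationContext(&getMoreOp));
    return 0;
}

If the forwarding to resolvedIntrospectionPipeline were removed from this sketch, the second validateOperationContext check would fail; that is the kind of stale-context state the new jstest above is designed to exercise.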