Skip to content

Commit

Permalink
SERVER-92904 Consider PBRT size when creating cursor response batch (#29288)
Browse files Browse the repository at this point in the history

GitOrigin-RevId: 48d06e256241f5b4c1475b7c9b78be0ff9247523
  • Loading branch information
romanskas authored and MongoDB Bot committed Nov 26, 2024
1 parent fbbe8ea commit dc1671f
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 3 deletions.
4 changes: 4 additions & 0 deletions etc/backports_required_for_multiversion_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -557,6 +557,8 @@ last-continuous:
ticket: SERVER-94635
- test_file: jstests/core/query/project/projection_with_hashed_index.js
ticket: SERVER-91757
- test_file: jstests/change_streams/report_post_batch_resume_token.js
ticket: SERVER-92904
- test_file: jstests/core/ddl/create_indexes.js
ticket: SERVER-90952
- test_file: jstests/core/timeseries/timeseries_filter_extended_range.js
Expand Down Expand Up @@ -1170,6 +1172,8 @@ last-lts:
ticket: SERVER-94635
- test_file: jstests/core/query/project/projection_with_hashed_index.js
ticket: SERVER-91757
- test_file: jstests/change_streams/report_post_batch_resume_token.js
ticket: SERVER-92904
- test_file: jstests/core/ddl/create_indexes.js
ticket: SERVER-90952
- test_file: jstests/core/timeseries/timeseries_filter_extended_range.js
Expand Down
11 changes: 11 additions & 0 deletions jstests/change_streams/report_post_batch_resume_token.js
Original file line number Diff line number Diff line change
Expand Up @@ -224,4 +224,15 @@ assert.gt(bsonWoCompare(txnEvent3._id, previousGetMorePBRT), 0);
// appear in the batch. Confirm that the postBatchResumeToken has been set correctly.
getMorePBRT = csCursor.getResumeToken();
assert.gte(bsonWoCompare(getMorePBRT, txnEvent3._id), 0);

// Test that a batch does not exceed the limit (and throw BSONObjectTooLarge) with a large
// post-batch resume token.
csCursor = testCollection.watch();
const kSecondDocSize = 80 * 1024;
// Here, 4.5 is some "unlucky" (non-unique) weight to provoke an error.
const kFirstDocSize = (16 * 1024 * 1024) - (4.5 * kSecondDocSize);
testCollection.insertMany([{a: "x".repeat(kFirstDocSize)}, {_id: "x".repeat(kSecondDocSize)}]);
assert.doesNotThrow(() => {
csCursor.hasNext();
}, [], "Unexpected exception on 'csCursor.hasNext()'.");
})();
11 changes: 8 additions & 3 deletions src/mongo/db/commands/getmore_cmd.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,7 @@ class GetMoreCmd final : public Command {
* be returned by this getMore.
*
* Returns true if the cursor should be saved for subsequent getMores, and false otherwise.
* Fills out *numResults with the number of documents in the batch, which must be
* Fills out 'numResults' with the number of documents in the batch, which must be
* initialized to zero by the caller.
*
* Throws an exception on failure.
Expand All @@ -387,9 +387,14 @@ class GetMoreCmd final : public Command {
try {
while (!FindCommon::enoughForGetMore(batchSize, *numResults) &&
PlanExecutor::ADVANCED == (state = exec->getNext(&obj, nullptr))) {
auto nextPostBatchResumeToken = exec->getPostBatchResumeToken();

// If adding this object will cause us to exceed the message size limit, then we
// stash it for later.
if (!FindCommon::haveSpaceForNext(obj, *numResults, nextBatch->bytesUsed())) {
if (!FindCommon::haveSpaceForNext(obj,
*numResults,
nextBatch->bytesUsed() +
nextPostBatchResumeToken.objsize())) {
exec->stashResult(obj);
break;
}
Expand All @@ -398,7 +403,7 @@ class GetMoreCmd final : public Command {
awaitDataState(opCtx).shouldWaitForInserts = false;

// If this executor produces a postBatchResumeToken, add it to the response.
nextBatch->setPostBatchResumeToken(exec->getPostBatchResumeToken());
nextBatch->setPostBatchResumeToken(nextPostBatchResumeToken);

// At this point, we know that there will be at least one document in this
// batch. Reserve an initial estimated number of bytes for the response.
Expand Down

0 comments on commit dc1671f

Please sign in to comment.