From c7010bfc6f74b5bf01d58886efde901411f24bdc Mon Sep 17 00:00:00 2001
From: Victor Ghita
Date: Tue, 4 Jul 2023 17:47:07 +0000
Subject: [PATCH 1/2] Created environment for SERVER-71836

---
 .../resmokeconfig/suites/change_streams.yml   |   3 +
 jstests/change_streams/large_documents.js     | 117 ++++++++++++++++++
 .../db/pipeline/document_path_support.cpp     |   9 ++
 3 files changed, 129 insertions(+)
 create mode 100644 jstests/change_streams/large_documents.js

diff --git a/buildscripts/resmokeconfig/suites/change_streams.yml b/buildscripts/resmokeconfig/suites/change_streams.yml
index 1b8715b57322..577270fd9e73 100644
--- a/buildscripts/resmokeconfig/suites/change_streams.yml
+++ b/buildscripts/resmokeconfig/suites/change_streams.yml
@@ -3,6 +3,9 @@ test_kind: js_test
 selector:
   roots:
   - jstests/change_streams/**/*.js
+  exclude_files:
+  # This file has its own fixture.
+  - jstests/change_streams/split_large_events.js
   exclude_with_any_tags:
   ##
   # The next tags correspond to the special errors thrown by the
diff --git a/jstests/change_streams/large_documents.js b/jstests/change_streams/large_documents.js
new file mode 100644
index 000000000000..2bca70afd99e
--- /dev/null
+++ b/jstests/change_streams/large_documents.js
@@ -0,0 +1,117 @@
+/**
+ * Tests that documentToBsonWithPaths doesn't generate documents that are too large.
+ *
+ * @tags: [
+ * ]
+ */
+(function() {
+"use strict";
+    load("jstests/libs/fixture_helpers.js");  // For 'FixtureHelpers'.
+    load("jstests/libs/collection_drop_recreate.js");  // For 'assertDropAndRecreateCollection()'.
+    const dbName = "db";
+    const collName = "test";
+
+    const rst = new ReplSetTest({nodes: 2});
+    rst.startSet();
+    rst.initiate();
+
+    const testDB = rst.getPrimary().getDB(dbName);
+    // assert.commandWorked(testDB.createCollection(collName));
+
+    // const testDB = db.getSiblingDB(jsTestName());
+    // Make sure the collection exists, because some validation might get skipped otherwise.
+    const testColl = assertDropAndRecreateCollection(testDB, collName);
+
+    // Compute the size for the large strings used in the subsequent tests.
+    const kLargeStringSize = (16 * 1024 * 1024) - bsonsize({_id: "aaa", a: "x".repeat(100)}) + 1;
+
+    // Insert two large documents into the test collection.
+    assert.commandWorked(testColl.insertMany([
+        {_id: "aaa", a: {
+            b: {
+                c: "x".repeat(kLargeStringSize),
+                d: "blah- blah",
+            }
+        }},
+        {_id: "bbb", a: {
+            b: {
+                c: "x".repeat(kLargeStringSize),
+                d: "blah- blah",
+            }
+        }},
+    ]));
+
+    // Enable pre- and post-images.
+    assert.commandWorked(testDB.runCommand(
+        {collMod: testColl.getName(), changeStreamPreAndPostImages: {enabled: true}}));
+
+    // Open a change stream without pre- and post-images.
+    let csCursor = testColl.watch([]);
+
+    // Record a resume token marking the start point of the test.
+    const testStartToken = csCursor.getResumeToken();
+
+    // Perform ~16MB updates which generate ~16MB change events and ~16MB post-images.
+    assert.commandWorked(testColl.update({_id: "aaa"}, {$set: {"a.b.c": "y".repeat(kLargeStringSize)}}));
+    // assert.commandWorked(testColl.update({_id: "bbb"}, {$set: {a: "y".repeat(kLargeStringSize)}}));
+
+    {
+        // Test that, without pre- and post-images, the $changeStreamSplitLargeEvent stage is not
+        // required.
+        assert.soon(() => csCursor.hasNext());
+        const fullEvent = csCursor.next();
+        assert.eq("aaa", fullEvent.documentKey._id);
+        assert(!fullEvent.splitEvent);
+    }
+
+    {
+        // Test that projecting a small sub-path out of a very large change event works.
+        const csCursor = testColl.watch(
+            [
+                {
+                    $match: {
+                        "fullDocument": {$exists: true},
+                    }
+                },
+                {
+                    $project: {
+                        "fullDocument.a.b.d": 1
+                    }
+                },
+            ],
+            {
+                fullDocument: "required",
+                fullDocumentBeforeChange: "required",
+                resumeAfter: testStartToken
+            }
+        );
+        assert.soon(() => csCursor.hasNext());
+        const fullEvent = csCursor.next();
+        jsTestLog("The fullEvent doc: '" + JSON.stringify(fullEvent.fullDocument) + "'");
+    }
+
+    {
+        // Test that, without a projection, an error is thrown because the event BSON is too
+        // large.
+        assert.throwsWithCode(() => {
+            const csCursor = testColl.watch(
+                [
+                    {
+                        $match: {
+                            "fullDocument": {$exists: true},
+                        }
+                    }
+                ],
+                {
+                    fullDocument: "required",
+                    fullDocumentBeforeChange: "required",
+                    resumeAfter: testStartToken
+                }
+            );
+            csCursor.hasNext();
+        }, ErrorCodes.BSONObjectTooLarge);
+    }
+
+
+    rst.stopSet();
+}());
diff --git a/src/mongo/db/pipeline/document_path_support.cpp b/src/mongo/db/pipeline/document_path_support.cpp
index 6ea0ad64e692..e0c7e1a68a13 100644
--- a/src/mongo/db/pipeline/document_path_support.cpp
+++ b/src/mongo/db/pipeline/document_path_support.cpp
@@ -46,11 +46,15 @@
 #include "mongo/util/assert_util_core.h"
 #include "mongo/util/str.h"
 
+#define MONGO_LOGV2_DEFAULT_COMPONENT ::mongo::logv2::LogComponent::kDefault
+
+#include "mongo/logv2/log.h"
+
 namespace mongo {
 namespace document_path_support {
 
 namespace {
 
 /**
  * If 'value' is an array, invokes 'callback' once on each element of 'value'. Otherwise, if 'value'
  * is not missing, invokes 'callback' on 'value' itself.
  */
@@ -149,6 +153,11 @@ StatusWith<Value> extractElementAlongNonArrayPath(const Document& doc, const Fie
 void documentToBsonWithPaths(const Document& input,
                              const OrderedPathSet& paths,
                              BSONObjBuilder* builder) {
+    LOGV2_INFO(999999997,
+               "Entered 'documentToBsonWithPaths' with params: doc - {document}; paths: {paths}",
+               "document"_attr = input,
+               "paths"_attr = paths);
+
     for (auto&& path : paths) {
         // getNestedField does not handle dotted paths correctly, so instead of retrieving the
         // entire path, we just extract the first element of the path.
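
Why [PATCH 1/2] fails the way it does: documentToBsonWithPaths splits each projected path only on its first component, so a projection like "fullDocument.a.b.d" copies the entire "fullDocument.a" subtree, including the ~16MB sibling "a.b.c", and serializing the resulting event trips the BSONObjectTooLarge check. The standalone C++ sketch below models that effect. It is illustrative only: Node, Object, and sizeOf are hypothetical stand-ins for the server's Document, Value, and BSON sizing, not MongoDB APIs.

    // Standalone sketch, not MongoDB code: models why extracting only the first
    // component of a dotted path copies far more data than the projection asks for.
    #include <cstddef>
    #include <iostream>
    #include <map>
    #include <memory>
    #include <string>

    struct Node;
    using Object = std::map<std::string, Node>;

    // A tree node: either a string leaf or an object with children.
    struct Node {
        std::string leaf;
        std::shared_ptr<Object> children;
    };

    // Rough stand-in for the serialized size of a subtree.
    std::size_t sizeOf(const Node& n) {
        if (!n.children)
            return n.leaf.size();
        std::size_t total = 0;
        for (const auto& [key, child] : *n.children)
            total += key.size() + sizeOf(child);
        return total;
    }

    int main() {
        // doc = {a: {b: {c: <~16MB string>, d: "blah"}}}
        auto b = std::make_shared<Object>();
        (*b)["c"] = Node{std::string(16 * 1024 * 1024, 'x'), nullptr};
        (*b)["d"] = Node{"blah", nullptr};
        auto a = std::make_shared<Object>();
        (*a)["b"] = Node{"", b};
        Object doc;
        doc["a"] = Node{"", a};

        // Shallow extraction of "a.b.d": only the first component "a" is used,
        // so the whole ~16MB subtree is copied into the projected output.
        std::cout << "shallow copy: " << sizeOf(doc.at("a")) << " bytes\n";

        // Walking every component instead keeps only the 4-byte leaf "d".
        const Object& deep = *doc.at("a").children->at("b").children;
        std::cout << "deep copy: " << sizeOf(deep.at("d")) << " bytes\n";
        return 0;
    }

On this toy model the shallow extraction copies roughly 16MB while the full walk copies 4 bytes; closing that gap is what the documentToBsonWithPathsDeep prototype in the next patch attempts.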
From d903e45dcac35b8f5e2a4e1dac3bcd6789cca793 Mon Sep 17 00:00:00 2001
From: Victor Ghita
Date: Fri, 7 Jul 2023 16:04:03 +0000
Subject: [PATCH 2/2] Benchmark prototyped solution against the original

---
 jstests/change_streams/large_documents.js     |  71 ++++-
 ...aggregation_document_to_bson_with_paths.js |  63 +++++
 src/mongo/db/pipeline/SConscript              |  15 +
 .../db/pipeline/document_path_support.cpp     |  39 ++-
 src/mongo/db/pipeline/document_path_support.h |  19 ++
 .../db/pipeline/document_path_support_bm.cpp  | 260 ++++++++++++++++++
 6 files changed, 451 insertions(+), 16 deletions(-)
 create mode 100644 jstests/noPassthrough/aggregation_document_to_bson_with_paths.js
 create mode 100644 src/mongo/db/pipeline/document_path_support_bm.cpp

diff --git a/jstests/change_streams/large_documents.js b/jstests/change_streams/large_documents.js
index 2bca70afd99e..5839e20f843a 100644
--- a/jstests/change_streams/large_documents.js
+++ b/jstests/change_streams/large_documents.js
@@ -31,13 +31,13 @@
         {_id: "aaa", a: {
             b: {
                 c: "x".repeat(kLargeStringSize),
-                d: "blah- blah",
+                d: ["blah- blah"],
             }
         }},
         {_id: "bbb", a: {
             b: {
                 c: "x".repeat(kLargeStringSize),
-                d: "blah- blah",
+                d: ["blah- blah"],
             }
         }},
     ]));
@@ -54,16 +54,16 @@
 
     // Perform ~16MB updates which generate ~16MB change events and ~16MB post-images.
     assert.commandWorked(testColl.update({_id: "aaa"}, {$set: {"a.b.c": "y".repeat(kLargeStringSize)}}));
-    // assert.commandWorked(testColl.update({_id: "bbb"}, {$set: {a: "y".repeat(kLargeStringSize)}}));
+    assert.commandWorked(testColl.update({_id: "bbb"}, {$set: {"a.b.c": "y".repeat(kLargeStringSize)}}));
 
-    {
-        // Test that, without pre- and post-images, the $changeStreamSplitLargeEvent stage is not
-        // required.
-        assert.soon(() => csCursor.hasNext());
-        const fullEvent = csCursor.next();
-        assert.eq("aaa", fullEvent.documentKey._id);
-        assert(!fullEvent.splitEvent);
-    }
+    // {
+    //     // Test that, without pre- and post-images, the $changeStreamSplitLargeEvent stage is not
+    //     // required.
+    //     assert.soon(() => csCursor.hasNext());
+    //     const fullEvent = csCursor.next();
+    //     assert.eq("aaa", fullEvent.documentKey._id);
+    //     assert(!fullEvent.splitEvent);
+    // }
@@ -88,7 +88,7 @@
         );
         assert.soon(() => csCursor.hasNext());
         const fullEvent = csCursor.next();
-        jsTestLog("The fullEvent doc: '" + JSON.stringify(fullEvent.fullDocument) + "'");
+        // jsTestLog("The fullEvent doc: '" + JSON.stringify(fullEvent.fullDocument) + "'");
     }
 
     {
@@ -112,6 +112,53 @@
         }, ErrorCodes.BSONObjectTooLarge);
     }
 
+    // {
+    //     // Test that projecting paths out of the large event does not fail.
+    //     const csCursor = testColl.watch(
+    //         [
+    //             {
+    //                 $project: {
+    //                     "fullDocument.a.b.d": 1,
+    //                     "fullDocumentBeforeChange.a.b.d": 1
+    //                 }
+    //             },
+    //         ],
+    //         {
+    //             fullDocument: "required",
+    //             fullDocumentBeforeChange: "required",
+    //             resumeAfter: testStartToken
+    //         }
+    //     );
+    //     assert.soon(() => csCursor.hasNext());
+    // }
+
+    // {
+    //     // Test that adding another large field and then projecting does not fail.
+    //     const csCursor = testColl.watch(
+    //         [
+    //             {
+    //                 $addFields: {
+    //                     "fullDocument.a.b.e": "q".repeat(kLargeStringSize),
+    //                 }
+    //             },
+    //             {
+    //                 $project: {
+    //                     "fullDocument.a.b.d": 1,
+    //                     "fullDocumentBeforeChange.a.b.d": 1
+    //                 }
+    //             },
+    //         ],
+    //         {
+    //             fullDocument: "required",
+    //             fullDocumentBeforeChange: "required",
+    //             resumeAfter: testStartToken
+    //         }
+    //     );
+    //     assert.soon(() => csCursor.hasNext());
+
+    //     const fullEvent = csCursor.next();
+    //     jsTestLog("The fullEvent doc: '" + JSON.stringify(fullEvent.fullDocument) + "'");
+    // }
 
     rst.stopSet();
 }());
diff --git a/jstests/noPassthrough/aggregation_document_to_bson_with_paths.js b/jstests/noPassthrough/aggregation_document_to_bson_with_paths.js
new file mode 100644
index 000000000000..a770f3ed5ada
--- /dev/null
+++ b/jstests/noPassthrough/aggregation_document_to_bson_with_paths.js
@@ -0,0 +1,63 @@
+// Tests that an aggregation can still project a small sub-path out of a document that grows
+// close to the 16MB BSON limit mid-pipeline.
+(function() {
+"use strict";
+    load("jstests/libs/fixture_helpers.js");  // For 'FixtureHelpers'.
+    load("jstests/libs/collection_drop_recreate.js");  // For 'assertDropAndRecreateCollection()'.
+    const dbName = "db";
+    const collName = "test";
+
+    const rst = new ReplSetTest({nodes: 1});
+    rst.startSet();
+    rst.initiate();
+
+    const testDB = rst.getPrimary().getDB(dbName);
+    const testColl = assertDropAndRecreateCollection(testDB, collName);
+
+    // const mongodPid = testDB._adminCommand("serverStatus").pid.valueOf().toString();
+    // jsTest.log("You can attach now to process " + mongodPid);
+    // jsTest.log("Press ENTER to continue...");
+    // runProgram("python", "-c", "input()");
+
+    // Compute the size for the large strings used in the subsequent tests.
+    const kLargeStringSize =
+        (16 * 1024 * 1024) - bsonsize({small: "blih-blih", large: "t".repeat(100)}) + 1;
+
+    // Insert one large document into the test collection.
+    assert.commandWorked(testColl.insertMany([
+        {
+            a: {
+                large: "a".repeat(kLargeStringSize),
+                small: ["blah-blah", "blah-blah"]
+            },
+        }
+    ]));
+
+    {
+        // Projecting a small field out of a document that grows beyond 16MB mid-pipeline
+        // should work.
+        const cursor = testColl.aggregate([
+            {
+                $addFields: {
+                    "a.c": {
+                        large: "c".repeat(kLargeStringSize),
+                        small: "blch-blch"
+                    }
+                },
+            },
+            {
+                $sort: {
+                    // "a.large": 1,
+                    "a.small": 1,
+                    // "a.c.small": 1
+                }
+            },
+            {
+                $project: {
+                    "a.small": 1,
+                }
+            }
+        ]).toArray();
+        assert.gte(cursor.length, 1);
+    }
+
+    rst.stopSet();
+}());
diff --git a/src/mongo/db/pipeline/SConscript b/src/mongo/db/pipeline/SConscript
index ef4d805a6094..d90d68e6dfad 100644
--- a/src/mongo/db/pipeline/SConscript
+++ b/src/mongo/db/pipeline/SConscript
@@ -210,6 +210,21 @@ env.Library(
     ],
 )
 
+env.Benchmark(
+    target="document_path_support_bm",
+    source=[
+        'document_path_support_bm.cpp',
+    ],
+    LIBDEPS=[
+        '$BUILD_DIR/mongo/base',
+        '$BUILD_DIR/mongo/db/common',
+        '$BUILD_DIR/mongo/db/exec/document_value/document_value',
+        '$BUILD_DIR/mongo/logv2/logv2_options',
+        'dependencies',
+        'document_path_support',
+    ],
+)
+
 env.Library(
     target="change_stream_error_extra_info",
     source=[
diff --git a/src/mongo/db/pipeline/document_path_support.cpp b/src/mongo/db/pipeline/document_path_support.cpp
index e0c7e1a68a13..653c12c1a174 100644
--- a/src/mongo/db/pipeline/document_path_support.cpp
+++ b/src/mongo/db/pipeline/document_path_support.cpp
@@ -48,6 +48,7 @@
 #include "mongo/logv2/log.h"
 
+
 namespace mongo {
 namespace document_path_support {
 
 namespace {
@@ -153,10 +154,9 @@ StatusWith<Value> extractElementAlongNonArrayPath(const Document& doc, const Fie
 void documentToBsonWithPaths(const Document& input,
                              const OrderedPathSet& paths,
                              BSONObjBuilder* builder) {
-    LOGV2_INFO(999999997,
-               "Entered 'documentToBsonWithPaths' with params: doc - {document}; paths: {paths}",
-               "document"_attr = input,
-               "paths"_attr = paths);
+    // LOGV2_INFO(999999997,
+    //            "Entered 'documentToBsonWithPaths' with params: paths: {paths}",
+    //            "paths"_attr = paths);
 
     for (auto&& path : paths) {
         // getNestedField does not handle dotted paths correctly, so instead of retrieving the
@@ -169,5 +169,36 @@ void documentToBsonWithPaths(const Document& input,
     }
 }
 
+void documentToBsonWithPathsDeep(const Document& input,
+                                 const OrderedPathSet& paths,
+                                 BSONObjBuilder* builder) {
+    // LOGV2_INFO(999999997,
+    //            "Entered 'documentToBsonWithPathsDeep' with params: paths: {paths}",
+    //            "paths"_attr = paths);
+
+    for (auto&& path : paths) {
+        documentToBsonWithPathsSubroutine(input, path, builder);
+    }
+}
+
+void documentToBsonWithPathsSubroutine(const Document& doc,
+                                       const StringData path,
+                                       BSONObjBuilder* builder) {
+    const auto prefix = FieldPath::extractFirstFieldFromDottedPath(path);
+    // Avoid adding the same prefix twice.
+    if (!builder->hasField(prefix)) {
+        // Look up the value of the first path component.
+        const auto& subDocument = doc.getField(prefix);
+        // Descend only while more path components remain and the value is not an array.
+        if (prefix != path && !subDocument.isArray()) {
+            BSONObjBuilder innerObj(builder->subobjStart(prefix));
+            documentToBsonWithPathsSubroutine(
+                subDocument.getDocument(), path.substr(path.find('.') + 1), &innerObj);
+            innerObj.doneFast();
+        } else {
+            subDocument.addToBsonObj(builder, prefix);
+        }
+    }
+}
+
 }  // namespace document_path_support
 }  // namespace mongo
diff --git a/src/mongo/db/pipeline/document_path_support.h b/src/mongo/db/pipeline/document_path_support.h
index db3bf461a50d..70f8cfef9b4e 100644
--- a/src/mongo/db/pipeline/document_path_support.h
+++ b/src/mongo/db/pipeline/document_path_support.h
@@ -69,6 +69,18 @@ StatusWith<Value> extractElementAlongNonArrayPath(const Document& doc, const Fie
  */
 void documentToBsonWithPaths(const Document&, const OrderedPathSet& paths, BSONObjBuilder* builder);
 
+void documentToBsonWithPathsDeep(const Document&,
+                                 const OrderedPathSet& paths,
+                                 BSONObjBuilder* builder);
+
+/**
+ * Subroutine used by 'documentToBsonWithPathsDeep' to extract a single path from the input
+ * document and append it to the object builder.
+ */
+void documentToBsonWithPathsSubroutine(const Document& doc,
+                                       StringData path,
+                                       BSONObjBuilder* builder);
+
 template <typename BSONTraits = BSONObj::DefaultSizeTrait>
 BSONObj documentToBsonWithPaths(const Document& input, const OrderedPathSet& paths) {
     BSONObjBuilder outputBuilder;
@@ -76,6 +88,13 @@ BSONObj documentToBsonWithPaths(const Document& input, const OrderedPathSet& pat
     return outputBuilder.obj<BSONTraits>();
 }
 
+template <typename BSONTraits = BSONObj::DefaultSizeTrait>
+BSONObj documentToBsonWithPathsDeep(const Document& input, const OrderedPathSet& paths) {
+    BSONObjBuilder outputBuilder;
+    documentToBsonWithPathsDeep(input, paths, &outputBuilder);
+    return outputBuilder.obj<BSONTraits>();
+}
+
 /**
  * Extracts 'paths' from the input document to a flat document.
  *
diff --git a/src/mongo/db/pipeline/document_path_support_bm.cpp b/src/mongo/db/pipeline/document_path_support_bm.cpp
new file mode 100644
index 000000000000..fb1ab9a5c77b
--- /dev/null
+++ b/src/mongo/db/pipeline/document_path_support_bm.cpp
@@ -0,0 +1,260 @@
+/**
+ *    Copyright (C) 2022-present MongoDB, Inc.
+ *
+ *    This program is free software: you can redistribute it and/or modify
+ *    it under the terms of the Server Side Public License, version 1,
+ *    as published by MongoDB, Inc.
+ *
+ *    This program is distributed in the hope that it will be useful,
+ *    but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *    Server Side Public License for more details.
+ *
+ *    You should have received a copy of the Server Side Public License
+ *    along with this program. If not, see
+ *    <http://www.mongodb.com/licensing/server-side-public-license>.
+ *
+ *    As a special exception, the copyright holders give permission to link the
+ *    code of portions of this program with the OpenSSL library under certain
+ *    conditions as described in each individual source file and distribute
+ *    linked combinations including the program with the OpenSSL library. You
+ *    must comply with the Server Side Public License in all respects for
+ *    all of the code used other than as permitted herein. If you modify file(s)
+ *    with this exception, you may extend this exception to your version of the
+ *    file(s), but you are not obligated to do so. If you do not wish to do so,
+ *    delete this exception statement from your version. If you delete this
+ *    exception statement from all source files in the program, then also delete
+ *    it in the license file.
+ */
+
+#include <cstddef>
+#include <sstream>
+#include <string>
+
+#include <benchmark/benchmark.h>
+
+#include "mongo/base/error_codes.h"
+#include "mongo/base/string_data.h"
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/bson/json.h"
+#include "mongo/bson/util/builder.h"
+#include "mongo/db/exec/document_value/document.h"
+#include "mongo/db/exec/document_value/document_internal.h"
+#include "mongo/db/exec/document_value/document_value_test_util.h"
+#include "mongo/db/exec/document_value/value.h"
+#include "mongo/db/exec/document_value/value_comparator.h"
+#include "mongo/db/pipeline/document_path_support.h"
+#include "mongo/db/pipeline/field_path.h"
+#include "mongo/util/assert_util_core.h"
+
+#define MONGO_LOGV2_DEFAULT_COMPONENT ::mongo::logv2::LogComponent::kDefault
+
+#include "mongo/logv2/log.h"
+
+namespace mongo {
+namespace {
+
+/**
+ * Generates a tree object, where on each level the parent contains exactly one nested BSON object,
+ * which behaves the same, recursively. For example, for numOfLevels = 3, the result is
+ * {
+ *     "a": {
+ *         "a": {
+ *             "a": "AAA..."
+ *         }
+ *     }
+ * }
+ */
+BSONObj generateOneLinerTree(size_t numOfLevels) {
+    // Speed up data generation by reusing the previous result.
+    static const std::string leafValue(128, 'A');
+    static BSONObj result = BSON("a" << leafValue);
+    static size_t resultSize = 1;
+
+    invariant(numOfLevels >= 1);
+
+    if (resultSize > numOfLevels) {
+        // Reset if the previous result cannot be reused.
+        result = BSON("a" << leafValue);
+        resultSize = 1;
+    }
+
+    for (; resultSize < numOfLevels; ++resultSize) {
+        result = BSON("a" << result);
+    }
+    return result;
+}
+
+/**
+ * Generates a somewhat balanced tree: each level holds three fields ("a", "b", "c"), each
+ * containing an identical subtree of the next level down.
+ */
+BSONObj generateSomewhatBalancedTree(size_t numOfLevels) {
+    // Speed up data generation by reusing the previous result.
+    static const std::string leafValueA(2, 'A');
+    static const std::string leafValueB(2, 'B');
+    static const std::string leafValueC(2, 'C');
+    static BSONObj result = BSON("a" << leafValueA << "b" << leafValueB << "c" << leafValueC);
+    static size_t resultSize = 1;
+    static const std::string alphabet{"abcdefghijklmnopqrstuvwxyz"};
+
+    invariant(numOfLevels >= 1);
+
+    if (resultSize > numOfLevels) {
+        // Reset if the previous result cannot be reused.
+        result = BSON("a" << leafValueA << "b" << leafValueB << "c" << leafValueC);
+        resultSize = 1;
+    }
+
+    // LOGV2_INFO(99999998, "NumOfLevels: {numOfLevels}", "numOfLevels"_attr = numOfLevels);
+
+    for (; resultSize < numOfLevels; ++resultSize) {
+        result = BSON("a" << result << "b" << result << "c" << result);
+    }
+    return result;
+}
+
+}  // namespace
+
+const ValueComparator kDefaultValueComparator{};
+
+void BM_documentToBsonWithPathsLinearTree(benchmark::State& state) {
+    Document doc{generateOneLinerTree(state.range(0))};
+    for (auto _ : state) {
+        auto setOfPaths = OrderedPathSet{};
+        // generate the path "a.a.a.a..."
+        std::stringstream ss;
+        ss << "a";
+        for (auto i = 0; i < state.range(0) - 1; ++i)
+            ss << ".a";
+        setOfPaths.insert(ss.str());
+        state.counters["objsize"] =
+            document_path_support::documentToBsonWithPaths(doc, setOfPaths).objsize();
+    }
+}
+
+void BM_documentToBsonWithPathsDeepLinearTree(benchmark::State& state) {
+    Document doc{generateOneLinerTree(state.range(0))};
+    for (auto _ : state) {
+        auto setOfPaths = OrderedPathSet{};
+        // generate the path "a.a.a.a..."
+        std::stringstream ss;
+        ss << "a";
+        for (auto i = 0; i < state.range(0) - 1; ++i)
+            ss << ".a";
+        setOfPaths.insert(ss.str());
+        state.counters["objsize"] =
+            document_path_support::documentToBsonWithPathsDeep(doc, setOfPaths).objsize();
+    }
+}
+
+void BM_documentToBsonWithPathsBalancedTreeAndEqualDepthPath(benchmark::State& state) {
+    Document doc{generateSomewhatBalancedTree(state.range(0))};
+    for (auto _ : state) {
+        auto setOfPaths = OrderedPathSet{};
+        // generate the path "a.a.a.a..."
+        std::stringstream ss;
+        ss << "a";
+        for (auto i = 0; i < state.range(0) - 1; ++i)
+            ss << ".a";
+        setOfPaths.insert(ss.str());
+        state.counters["objsize"] =
+            document_path_support::documentToBsonWithPaths(doc, setOfPaths).objsize();
+    }
+}
+
+void BM_documentToBsonWithPathsDeepBalancedTreeAndEqualDepthPath(benchmark::State& state) {
+    Document doc{generateSomewhatBalancedTree(state.range(0))};
+    for (auto _ : state) {
+        auto setOfPaths = OrderedPathSet{};
+        // generate the path "a.a.a.a..."
+        std::stringstream ss;
+        ss << "a";
+        for (auto i = 0; i < state.range(0) - 1; ++i)
+            ss << ".a";
+        setOfPaths.insert(ss.str());
+
+        // LOGV2_INFO(99999998, "Path: {path}", "path"_attr = ss.str());
+        // LOGV2_INFO(99999998, "Object: {obj}", "obj"_attr = doc.toBson().toString());
+        state.counters["objsize"] =
+            document_path_support::documentToBsonWithPathsDeep(doc, setOfPaths).objsize();
+    }
+}
+
+void BM_documentToBsonWithPathsBalancedTreeAndHalfDepthPath(benchmark::State& state) {
+    Document doc{generateSomewhatBalancedTree(state.range(0))};
+    for (auto _ : state) {
+        auto setOfPaths = OrderedPathSet{};
+        // generate the path "a.a.a.a..."
+        std::stringstream ss;
+        ss << "a";
+        for (auto i = 0; i < state.range(0) / 2; ++i)
+            ss << ".a";
+        setOfPaths.insert(ss.str());
+        state.counters["objsize"] =
+            document_path_support::documentToBsonWithPaths(doc, setOfPaths).objsize();
+    }
+}
+
+void BM_documentToBsonWithPathsDeepBalancedTreeAndHalfDepthPath(benchmark::State& state) {
+    Document doc{generateSomewhatBalancedTree(state.range(0))};
+    for (auto _ : state) {
+        auto setOfPaths = OrderedPathSet{};
+        // generate the path "a.a.a.a..."
+        std::stringstream ss;
+        ss << "a";
+        for (auto i = 0; i < state.range(0) / 2; ++i)
+            ss << ".a";
+        setOfPaths.insert(ss.str());
+        state.counters["objsize"] =
+            document_path_support::documentToBsonWithPathsDeep(doc, setOfPaths).objsize();
+    }
+}
+
+void BM_documentToBsonWithPathsBalancedTreeAndOneLengthDepthPath(benchmark::State& state) {
+    Document doc{generateSomewhatBalancedTree(state.range(0))};
+    for (auto _ : state) {
+        std::string path{"a"};
+        state.counters["objsize"] =
+            document_path_support::documentToBsonWithPaths(doc, {path}).objsize();
+    }
+}
+
+void BM_documentToBsonWithPathsDeepBalancedTreeAndOneLengthDepthPath(benchmark::State& state) {
+    Document doc{generateSomewhatBalancedTree(state.range(0))};
+    for (auto _ : state) {
+        std::string path{"a"};
+        state.counters["objsize"] =
+            document_path_support::documentToBsonWithPathsDeep(doc, {path}).objsize();
+    }
+}
+
+static const size_t start_linear = 200;
+static const size_t end_linear = 6'000;
+static const size_t start_balanced = 3;
+static const size_t end_balanced = 12;
+
+BENCHMARK(BM_documentToBsonWithPathsLinearTree)
+    ->Range(start_linear, end_linear)
+    ->Unit(benchmark::kMillisecond);
+BENCHMARK(BM_documentToBsonWithPathsDeepLinearTree)
+    ->Range(start_linear, end_linear)
+    ->Unit(benchmark::kMillisecond);
+
+BENCHMARK(BM_documentToBsonWithPathsBalancedTreeAndEqualDepthPath)
+    ->Range(start_balanced, end_balanced)
+    ->Unit(benchmark::kMillisecond);
+BENCHMARK(BM_documentToBsonWithPathsDeepBalancedTreeAndEqualDepthPath)
+    ->Range(start_balanced, end_balanced)
+    ->Unit(benchmark::kMillisecond);
+
+BENCHMARK(BM_documentToBsonWithPathsBalancedTreeAndHalfDepthPath)
+    ->Range(start_balanced, end_balanced)
+    ->Unit(benchmark::kMillisecond);
+BENCHMARK(BM_documentToBsonWithPathsDeepBalancedTreeAndHalfDepthPath)
+    ->Range(start_balanced, end_balanced)
+    ->Unit(benchmark::kMillisecond);
+
+BENCHMARK(BM_documentToBsonWithPathsBalancedTreeAndOneLengthDepthPath)
+    ->Range(start_balanced, end_balanced)
+    ->Unit(benchmark::kMillisecond);
+BENCHMARK(BM_documentToBsonWithPathsDeepBalancedTreeAndOneLengthDepthPath)
+    ->Range(start_balanced, end_balanced)
+    ->Unit(benchmark::kMillisecond);
+
+}  // namespace mongo
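
For reference, the recursion in documentToBsonWithPathsSubroutine can be sketched outside the server tree. The sketch below is a minimal standalone model, not MongoDB code: Obj stands in for both Document and the builder output, arrays are not modeled, and, unlike the prototype's hasField() early-out, sibling paths that share a prefix (e.g. "a.b.d" and "a.e") are merged into the same subobject.

    // Standalone sketch, not MongoDB code: a toy model of the recursion in
    // documentToBsonWithPathsSubroutine.
    #include <iostream>
    #include <map>
    #include <string>

    struct Obj {
        std::string leaf;                 // non-empty => leaf value
        std::map<std::string, Obj> kids;  // non-empty => subobject
    };

    // Append the value at dotted 'path' in 'doc' to 'out', descending one
    // component at a time.
    void appendPath(const Obj& doc, const std::string& path, Obj* out) {
        const auto dot = path.find('.');
        const std::string prefix = path.substr(0, dot);
        const auto it = doc.kids.find(prefix);
        if (it == doc.kids.end())
            return;  // Path component not present in the input.
        if (dot != std::string::npos && !it->second.kids.empty()) {
            // More components remain and the value is a subobject: recurse into
            // out->kids[prefix]. Reusing that entry merges sibling paths that
            // share this prefix instead of dropping them.
            appendPath(it->second, path.substr(dot + 1), &out->kids[prefix]);
        } else {
            out->kids[prefix] = it->second;  // Copy just this subtree.
        }
    }

    void print(const Obj& o, const std::string& indent = "") {
        for (const auto& [key, child] : o.kids) {
            if (child.kids.empty()) {
                std::cout << indent << key << ": " << child.leaf << "\n";
            } else {
                std::cout << indent << key << ":\n";
                print(child, indent + "  ");
            }
        }
    }

    int main() {
        Obj doc;
        doc.kids["a"].kids["b"].kids["c"].leaf = "big";
        doc.kids["a"].kids["b"].kids["d"].leaf = "small";
        doc.kids["a"].kids["e"].leaf = "other";

        Obj out;
        for (const char* path : {"a.b.d", "a.e"})
            appendPath(doc, path, &out);
        print(out);  // Prints: a: { b: { d: small }, e: other }
        return 0;
    }

One caveat worth checking in the prototype itself: because documentToBsonWithPathsSubroutine skips a prefix entirely once builder->hasField(prefix) returns true, two sibling paths such as "a.b" and "a.c" would leave "a.c" unprojected; merging into the already-open subobject, as sketched above, avoids that.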