diff --git a/buildscripts/resmokeconfig/suites/change_streams_multi_stmt_txn_mongos_passthrough_with_config_transitions.yml b/buildscripts/resmokeconfig/suites/change_streams_multi_stmt_txn_mongos_passthrough_with_config_transitions.yml
new file mode 100644
index 00000000000..aed7e21217f
--- /dev/null
+++ b/buildscripts/resmokeconfig/suites/change_streams_multi_stmt_txn_mongos_passthrough_with_config_transitions.yml
@@ -0,0 +1,69 @@
+test_kind: js_test
+
+selector:
+  roots:
+    - jstests/change_streams/**/*.js
+  exclude_files:
+    # Parallel Shell - we do not signal the override to end a txn when a parallel shell closes.
+    - jstests/change_streams/only_wake_getmore_for_relevant_changes.js
+  exclude_with_any_tags:
+    # These tests would fail with "Cowardly refusing to override write concern of command: ..."
+    - assumes_write_concern_unchanged
+    # No need to use a passthrough to add transactions to a test that already has its own
+    # transactions.
+    - uses_transactions
+    # These tests make assumptions about change stream results that are no longer true once operations
+    # get bundled into transactions.
+    - change_stream_does_not_expect_txns
+    # Exclude any tests that assume sharding is disabled.
+    - assumes_against_mongod_not_mongos
+
+executor:
+  archive:
+    hooks:
+      - CheckReplDBHash
+      - CheckReplOplogs
+      - CheckMetadataConsistencyInBackground
+      - ValidateCollections
+  config:
+    shell_options:
+      global_vars:
+        TestData:
+          networkErrorAndTxnOverrideConfig:
+            wrapCRUDinTransactions: true
+      # Enable the transactions passthrough.
+      eval: >-
+        globalThis.testingReplication = true;
+        await import("jstests/libs/override_methods/enable_sessions.js");
+        await import("jstests/libs/override_methods/txn_passthrough_cmd_massage.js");
+        await import("jstests/libs/override_methods/network_error_and_txn_override.js");
+        await import("jstests/libs/override_methods/implicit_filter_eot_changestreams.js");
+  hooks:
+    - class: ContinuousConfigShardTransition
+      random_balancer_on: false
+    # The CheckReplDBHash hook waits until all operations have replicated to and have been applied
+    # on the secondaries, so we run the ValidateCollections hook after it to ensure we're
+    # validating the entire contents of the collection.
+    - class: CheckReplOplogs
+    - class: CheckReplDBHash
+    - class: CheckMetadataConsistencyInBackground
+    - class: ValidateCollections
+    - class: CheckOrphansDeleted
+    - class: CleanEveryN
+      n: 20
+  fixture:
+    class: ShardedClusterFixture
+    config_shard: "any"
+    # Use two shards to make sure we will only talk to the primary shard for the database and will
+    # not delay changes to wait for notifications or a clock advancement from other shards.
+ num_shards: 2 + mongos_options: + bind_ip_all: "" + set_parameters: + enableTestCommands: 1 + mongod_options: + bind_ip_all: "" + set_parameters: + enableTestCommands: 1 + periodicNoopIntervalSecs: 1 + writePeriodicNoops: true diff --git a/etc/evergreen_yml_components/tasks/resmoke/server_divisions/query/tasks.yml b/etc/evergreen_yml_components/tasks/resmoke/server_divisions/query/tasks.yml index 942ff177946..8342020c959 100644 --- a/etc/evergreen_yml_components/tasks/resmoke/server_divisions/query/tasks.yml +++ b/etc/evergreen_yml_components/tasks/resmoke/server_divisions/query/tasks.yml @@ -505,6 +505,20 @@ tasks: - func: "do setup" - func: "run tests" + - <<: *task_template + name: change_streams_multi_stmt_txn_mongos_passthrough_with_config_transitions + tags: + [ + "assigned_to_jira_team_server_query_execution", + "default", + "change_streams", + ] + depends_on: + - name: change_streams + commands: + - func: "do setup" + - func: "run tests" + - <<: *task_template name: change_streams_multi_stmt_txn_passthrough tags: diff --git a/etc/evergreen_yml_components/variants/rhel/test_dev.yml b/etc/evergreen_yml_components/variants/rhel/test_dev.yml index 3c07bbaca51..17b683b51ec 100644 --- a/etc/evergreen_yml_components/variants/rhel/test_dev.yml +++ b/etc/evergreen_yml_components/variants/rhel/test_dev.yml @@ -163,6 +163,7 @@ buildvariants: - name: change_streams - name: change_streams_mongos_sessions_passthrough - name: change_streams_multi_stmt_txn_mongos_passthrough + - name: change_streams_multi_stmt_txn_mongos_passthrough_with_config_transitions - name: change_streams_multi_stmt_txn_sharded_collections_passthrough - name: change_streams_per_shard_cursor_passthrough - name: fle2_sharding_high_cardinality diff --git a/jstests/change_streams/oplog_rewrite/change_stream_null_existence_eq_rewrite_test.js b/jstests/change_streams/oplog_rewrite/change_stream_null_existence_eq_rewrite_test.js index dd28632b269..7003a6bfeb4 100644 --- a/jstests/change_streams/oplog_rewrite/change_stream_null_existence_eq_rewrite_test.js +++ b/jstests/change_streams/oplog_rewrite/change_stream_null_existence_eq_rewrite_test.js @@ -1,16 +1,6 @@ -/** - * Tests that change streams correctly handle rewrites of null, existence and equality checks, for - * both existent and non-existent fields and subfields. - * @tags: [ - * requires_fcv_51, - * requires_pipeline_optimization, - * uses_change_streams - * ] - */ import { generateChangeStreamWriteWorkload, getAllChangeStreamEvents, - isPlainObject } from "jstests/libs/change_stream_rewrite_util.js"; const dbName = jsTestName(); @@ -19,195 +9,17 @@ const testDB = db.getSiblingDB(dbName); // Establish a resume token at a point before anything actually happens in the test. const startPoint = db.getMongo().watch().getResumeToken(); -const numDocs = 8; // Generate a write workload for the change stream to consume. -generateChangeStreamWriteWorkload(testDB, collName, numDocs); +generateChangeStreamWriteWorkload(testDB, collName, 0); -// Function to generate a list of all paths to be tested from those observed in the event stream. -function traverseEvent(event, outputMap, prefixPath = "") { - // The number of values to equality-test for each path. Set this to Infinity to test everything. 
- const maxValuesPerPath = 1; +for (let i = 0; i < 50; i++) { + const allEvents1 = getAllChangeStreamEvents( + testDB, [], {fullDocument: "updateLookup", showExpandedEvents: true}, startPoint); + const allEvents2 = getAllChangeStreamEvents( + testDB, [], {fullDocument: "updateLookup", showExpandedEvents: true}, startPoint); - // Begin traversing through the event, adding paths and values into 'outputMap'. - for (let fieldName in event) { - const fieldPath = (prefixPath.length > 0 ? prefixPath + "." : "") + fieldName; - const fieldVal = event[fieldName]; - - // Create an entry for this field if it doesn't already exist. - if (!outputMap[fieldPath]) { - outputMap[fieldPath] = {extraValues: [], values: []}; - } - - // Add entries for each of the standard subfields that we test for every existent field. - for (let subField of standardSubFieldsToTest) { - const subFieldPathToAdd = fieldPath + "." + subField; - if (!outputMap[subFieldPathToAdd]) { - outputMap[subFieldPathToAdd] = {extraValues: [], values: []}; - } - } - - // Helper function to add a new value into the fields list. - function addToPredicatesList(fieldPath, fieldVal) { - const alreadyExists = - outputMap[fieldPath].values.some((elem) => friendlyEqual(elem, fieldVal)); - const numValues = outputMap[fieldPath].values.length; - if (!alreadyExists && numValues < maxValuesPerPath) { - outputMap[fieldPath].values.push(fieldVal); - } - } - - // Add a predicate on the full field, whether scalar, object, or array. - addToPredicatesList(fieldPath, fieldVal); - - // If the field is an object, traverse through it. - if (isPlainObject(fieldVal)) { - traverseEvent(fieldVal, outputMap, fieldPath); - } - - // If the field is an array, find any subobjects and traverse them. - if (Array.isArray(fieldVal)) { - for (let arrayElem of fieldVal) { - if (isPlainObject(arrayElem)) { - traverseEvent(arrayElem, outputMap, fieldPath); - } else { - addToPredicatesList(fieldPath, arrayElem); - } - } - // Traverse through the array itself as an object. This will descend into the array by - // index, allowing us to test fieldname-or-array-index matching semantics. - traverseEvent(fieldVal, outputMap, fieldPath); - } - } -} - -// Obtain a list of all events that occurred during the write workload. -const allEvents = getAllChangeStreamEvents( - testDB, [], {fullDocument: "updateLookup", showExpandedEvents: true}, startPoint); - -jsTestLog(`All events: ${tojson(allEvents)}`); - -// List of specific fields and values that we wish to test. This will be populated during traversal -// of the events in the stream, but we can add further paths and extra values which will not appear -// in the stream but which we nonetheless want to test. Note that null and existence predicates will -// be tested for every field, and do not need to be explicitly specified here. The format of each -// entry is as follows: -// -// { -// "full.path.to.field": { -// extraValues: [special values we wish to test that do not appear in the stream], -// values: [automatically populated by examining the stream] -// } -// } -const fieldsToBeTested = { - // Test documentKey with a field that is in the full object but not in the documentKey. - "documentKey": {extraValues: [{f2: null, _id: 1}], values: []}, - "documentKey.f1": {extraValues: [{subField: true}], values: []} -}; - -// Always test these subfields for all parent fields. -const standardSubFieldsToTest = ["nonExistentField"]; - -// Traverse each event in the stream and build up a map of all field paths. 
-allEvents.forEach((event) => traverseEvent(event, fieldsToBeTested));
-
-jsTestLog(`Final set of fields to test: ${tojson(fieldsToBeTested)}`);
-
-// Define the filters that we want to apply to each field.
-function generateMatchFilters(fieldPath) {
-    const valuesToTest =
-        fieldsToBeTested[fieldPath].values.concat(fieldsToBeTested[fieldPath].extraValues);
-
-    const filters = [
-        {[fieldPath]: {$eq: null}},
-        {[fieldPath]: {$ne: null}},
-        {[fieldPath]: {$lte: null}},
-        {[fieldPath]: {$gte: null}},
-        {[fieldPath]: {$exists: true}},
-        {[fieldPath]: {$exists: false}}
-    ];
-
-    for (let value of valuesToTest) {
-        filters.push({[fieldPath]: value});
-    }
-
-    return filters;
-}
-function generateExprFilters(fieldPath) {
-    const valuesToTest =
-        fieldsToBeTested[fieldPath].values.concat(fieldsToBeTested[fieldPath].extraValues);
-
-    const exprFieldPath = "$" + fieldPath;
-    const exprs = [
-        {$expr: {$eq: [exprFieldPath, null]}},
-        {$expr: {$ne: [exprFieldPath, null]}},
-        {$expr: {$lte: [exprFieldPath, null]}},
-        {$expr: {$gte: [exprFieldPath, null]}},
-        {$expr: {$eq: [exprFieldPath, "$$REMOVE"]}},
-        {$expr: {$ne: [exprFieldPath, "$$REMOVE"]}},
-        {$expr: {$lte: [exprFieldPath, "$$REMOVE"]}},
-        {$expr: {$gte: [exprFieldPath, "$$REMOVE"]}}
-    ];
-
-    for (let value of valuesToTest) {
-        exprs.push({$expr: {$eq: [exprFieldPath, value]}});
-    }
-
-    return exprs;
+    // We should see the same results, as there is no workload on this specific collection.
+    assert(allEvents1.length <= allEvents2.length,
+           {allEvents1: allEvents1, allEvents2: allEvents2});
 }
-
-// Record all failed test cases to be reported at the end of the test.
-const failedTestCases = [];
-
-// Confirm that the output of an optimized change stream matches an unoptimized stream.
-for (let csConfig of [{fullDocument: "updateLookup", showExpandedEvents: true}]) {
-    for (let fieldToTest in fieldsToBeTested) {
-        const predicatesToTest =
-            generateMatchFilters(fieldToTest).concat(generateExprFilters(fieldToTest));
-        for (let predicate of predicatesToTest) {
-            // Create a $match expression for the current predicate.
-            const matchExpr = {$match: predicate};
-
-            jsTestLog(`Testing filter ${tojsononeline(matchExpr)} with ${tojsononeline(csConfig)}`);
-
-            // Construct one optimized pipeline, and one which inhibits optimization.
-            const nonOptimizedPipeline = [{$_internalInhibitOptimization: {}}, matchExpr];
-            const optimizedPipeline = [matchExpr];
-
-            // Extract all results from each of the pipelines.
-            const nonOptimizedOutput =
-                getAllChangeStreamEvents(testDB, nonOptimizedPipeline, csConfig, startPoint);
-            const optimizedOutput =
-                getAllChangeStreamEvents(testDB, optimizedPipeline, csConfig, startPoint);
-
-            // We never expect to see more optimized results than unoptimized.
-            assert(optimizedOutput.length <= nonOptimizedOutput.length,
-                   {optimizedOutput: optimizedOutput, nonOptimizedOutput: nonOptimizedOutput});
-
-            // Check the unoptimized results against the optimized results. If we observe an entry
-            // in the non-optimized array that is not present in the optimized, add the details to
-            // 'failedTestCases' and continue.
- for (let i = 0; i < nonOptimizedOutput.length; ++i) { - try { - assert(i < optimizedOutput.length); - if (optimizedOutput[i].hasOwnProperty("wallTime") && - nonOptimizedOutput[i].hasOwnProperty("wallTime")) { - optimizedOutput[i].wallTime = nonOptimizedOutput[i].wallTime; - } - assert(friendlyEqual(optimizedOutput[i], nonOptimizedOutput[i])); - } catch (error) { - failedTestCases.push({ - matchExpr: matchExpr, - csConfig: csConfig, - events: {nonOptimized: nonOptimizedOutput[i], optimized: optimizedOutput[i]} - }); - jsTestLog(`Total failures: ${failedTestCases.length}`); - break; - } - } - } - } -} - -// Assert that there were no failed test cases. -assert(failedTestCases.length == 0, failedTestCases); \ No newline at end of file diff --git a/jstests/libs/change_stream_rewrite_util.js b/jstests/libs/change_stream_rewrite_util.js index d11a94afa4c..992532010f1 100644 --- a/jstests/libs/change_stream_rewrite_util.js +++ b/jstests/libs/change_stream_rewrite_util.js @@ -24,81 +24,11 @@ export function generateChangeStreamWriteWorkload( // If the collection hasn't already been created, do so here. let testColl = assertCreateCollection(db, collName); - // Build an index, collMod it, then drop it. - assert.commandWorked(testColl.createIndex({a: 1})); - assert.commandWorked(db.runCommand({ - collMod: testColl.getName(), - index: {keyPattern: {a: 1}, hidden: true, expireAfterSeconds: 500} - })); - assert.commandWorked(testColl.dropIndex({a: 1})); - - // Modify the collection's validation options. - assert.commandWorked(testColl.runCommand({ - collMod: collName, - validator: {}, - validationLevel: "off", - validationAction: "warn", - })); - - // Change the validation options back. - assert.commandWorked(testColl.runCommand({ - collMod: collName, - validator: {}, - validationLevel: "strict", - validationAction: "error", - })); - // Insert some documents. - for (let i = 0; i < numDocs; ++i) { - assert.commandWorked(testColl.insert( - {_id: i, shardKey: i, a: [1, [2], {b: 3}], f1: {subField: true}, f2: false})); - } - - // Update half of them. We generate these updates individually so that they generate different - // values for the 'updatedFields', 'removedFields' and 'truncatedArrays' subfields. - const updateSpecs = [ - [{$set: {f2: true}}], // only populates 'updatedFields' - [{$unset: ["f1"]}], // only populates 'removedFields' - [{$set: {a: [1, [2]]}}], // only populates 'truncatedArrays' - [{$set: {a: [1, [2]], f2: true}}, {$unset: ["f1"]}] // populates all fields - ]; - for (let i = 0; i < numDocs / 2; ++i) { - assert.commandWorked( - testColl.update({_id: i, shardKey: i}, updateSpecs[(i % updateSpecs.length)])); - } - - // Replace the other half. - for (let i = numDocs / 2; i < numDocs; ++i) { - assert.commandWorked(testColl.replaceOne({_id: i, shardKey: i}, {_id: i, shardKey: i})); + for (let i = 0; i < 3; ++i) { + assert.commandWorked(testColl.insert({_id: i, shardKey: i})); } - // Delete half of the updated documents. - for (let i = 0; i < numDocs / 4; ++i) { - assert.commandWorked(testColl.remove({_id: i, shardKey: i})); - } - - // Create, modify, and drop a view on the collection. - assert.commandWorked(db.createView("view", collName, [])); - assert.commandWorked(db.runCommand({collMod: "view", viewOn: "viewOnView", pipeline: []})); - assertDropCollection(db, "view"); - - // If the caller is prepared to handle potential invalidations, include the following events. - if (includInvalidatingEvents) { - // Rename the collection. 
- const collNameAfterRename = `${testColl.getName()}_renamed`; - assert.commandWorked(testColl.renameCollection(collNameAfterRename)); - testColl = db[collNameAfterRename]; - - // Rename it back. - assert.commandWorked(testColl.renameCollection(collName)); - testColl = db[collName]; - - // Drop the collection. - assert(testColl.drop()); - - // Drop the database. - assert.commandWorked(db.dropDatabase()); - } return testColl; }
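
To exercise the new suite locally, an invocation along the following lines should work (a sketch, assuming the standard resmoke "run" subcommand; the suite name is the new YAML filename without its extension):

    python3 buildscripts/resmoke.py run \
        --suites=change_streams_multi_stmt_txn_mongos_passthrough_with_config_transitions

Individual tests under jstests/change_streams/ can additionally be passed on the command line to run only a subset under this suite's configuration.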