next-mongo-shard-0-0-1:logs# curl -X POST -H "Accept:application/json" -H "Content-Type: application/json" localhost:9083/connectors/ --data '{
  "name": "mongo-source-shardedDB-mongos",
  "config": {
    "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
    "key.converter": "org.apache.kafka.connect.json.JsonConverter",
    "key.converter.schemas.enable": "false",
    "value.converter": "org.apache.kafka.connect.json.JsonConverter",
    "value.converter.schemas.enable": "false",
    "connection.uri": "mongodb://10.74.3.104:27017",
    "database": "shardedDB",
    "collection": "shardedCollection",
    "publish.full.document.only": "true",
    "topic.prefix": "test",
    "batch.size": "5000",
    "copy.existing": "true",
    "copy.existing.max.threads": "3",
    "copy.existing.queue.size": "64000"
  }
}'
[2020-06-13 10:32:10,731] INFO Cluster created with settings {hosts=[10.74.3.104:27017], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms', maxWaitQueueSize=500} (org.mongodb.driver.cluster:71)
[2020-06-13 10:32:10,737] INFO Opened connection [connectionId{localValue:25}] to 10.74.3.104:27017 (org.mongodb.driver.connection:71)
[2020-06-13 10:32:10,738] INFO Monitor thread successfully connected to server with description ServerDescription{address=10.74.3.104:27017, type=SHARD_ROUTER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 6, 8]}, minWireVersion=0, maxWireVersion=6, maxDocumentSize=16777216, logicalSessionTimeoutMinutes=30, roundTripTimeNanos=1043313} (org.mongodb.driver.cluster:71)
[2020-06-13 10:32:10,739] INFO AbstractConfig values: (org.apache.kafka.common.config.AbstractConfig:347)
[2020-06-13 10:32:11,648] INFO [Worker clientId=connect-1, groupId=connect-cluster] Connector mongo-source-shardedDB-mongos config updated (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1394)
[2020-06-13 10:32:11,648] INFO [Worker clientId=connect-1, groupId=connect-cluster] Rebalance started (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:222)
[2020-06-13 10:32:11,648] INFO [Worker clientId=connect-1, groupId=connect-cluster] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:533)
{"name":"mongo-source-shardedDB-mongos","config":{"connector.class":"com.mongodb.kafka.connect.MongoSourceConnector","key.converter":"org.apache.kafka.connect.json.JsonConverter","key.converter.schemas.enable":"false","value.converter":"org.apache.kafka.connect.json.JsonConverter","value.converter.schemas.enable":"false","connection.uri":"mongodb://10.74.3.104:27017","database":"shardedDB","collection":"shardedCollection","publish.full.document.only":"true","topic.prefix":"test","batch.size":"5000","copy.existing":"true","copy.existing.max.threads":"3","copy.existing.queue.size":"64000","name":"mongo-source-shardedDB-mongos"},"tasks":[],"type":"source"}
next-mongo-shard-0-0-1:logs#
[2020-06-13 10:32:11,654] INFO [Worker clientId=connect-1, groupId=connect-cluster] Successfully joined group with generation 33 (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:484)
[2020-06-13 10:32:11,654] INFO [Worker clientId=connect-1, groupId=connect-cluster] Joined group at generation 33 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-20f1db32-6c53-4871-bbd7-760a43184de1', leaderUrl='http://10.74.3.142:9083/', offset=40, connectorIds=[mongo-source-shardedDB-mongos, mongo-source-assets-mongos], taskIds=[mongo-source-assets-mongos-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1540)
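For reference, the connector and its task can be checked at any point through the standard Kafka Connect REST API on the same worker port; a healthy deployment should report both the connector and task mongo-source-shardedDB-mongos-0 as RUNNING:

curl -s localhost:9083/connectors/mongo-source-shardedDB-mongos/status

The worker logs then continue as follows.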
[2020-06-13 10:32:11,655] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting connectors and tasks using config offset 40 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1104)
[2020-06-13 10:32:11,655] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting connector mongo-source-shardedDB-mongos (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1179)
[2020-06-13 10:32:11,656] INFO ConnectorConfig values:
    config.action.reload = restart
    connector.class = com.mongodb.kafka.connect.MongoSourceConnector
    errors.log.enable = false
    errors.log.include.messages = false
    errors.retry.delay.max.ms = 60000
    errors.retry.timeout = 0
    errors.tolerance = none
    header.converter = null
    key.converter = class org.apache.kafka.connect.json.JsonConverter
    name = mongo-source-shardedDB-mongos
    tasks.max = 1
    transforms = []
    value.converter = class org.apache.kafka.connect.json.JsonConverter
 (org.apache.kafka.connect.runtime.ConnectorConfig:347)
[2020-06-13 10:32:11,656] INFO EnrichedConnectorConfig values:
    config.action.reload = restart
    connector.class = com.mongodb.kafka.connect.MongoSourceConnector
    errors.log.enable = false
    errors.log.include.messages = false
    errors.retry.delay.max.ms = 60000
    errors.retry.timeout = 0
    errors.tolerance = none
    header.converter = null
    key.converter = class org.apache.kafka.connect.json.JsonConverter
    name = mongo-source-shardedDB-mongos
    tasks.max = 1
    transforms = []
    value.converter = class org.apache.kafka.connect.json.JsonConverter
 (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:347)
[2020-06-13 10:32:11,656] INFO Creating connector mongo-source-shardedDB-mongos of type com.mongodb.kafka.connect.MongoSourceConnector (org.apache.kafka.connect.runtime.Worker:251)
[2020-06-13 10:32:11,657] INFO Instantiated connector mongo-source-shardedDB-mongos with version 1.1.0 of type class com.mongodb.kafka.connect.MongoSourceConnector (org.apache.kafka.connect.runtime.Worker:254)
[2020-06-13 10:32:11,657] INFO Finished creating connector mongo-source-shardedDB-mongos (org.apache.kafka.connect.runtime.Worker:273)
[2020-06-13 10:32:11,658] INFO SourceConnectorConfig values:
    config.action.reload = restart
    connector.class = com.mongodb.kafka.connect.MongoSourceConnector
    errors.log.enable = false
    errors.log.include.messages = false
    errors.retry.delay.max.ms = 60000
    errors.retry.timeout = 0
    errors.tolerance = none
    header.converter = null
    key.converter = class org.apache.kafka.connect.json.JsonConverter
    name = mongo-source-shardedDB-mongos
    tasks.max = 1
    transforms = []
    value.converter = class org.apache.kafka.connect.json.JsonConverter
 (org.apache.kafka.connect.runtime.SourceConnectorConfig:347)
[2020-06-13 10:32:11,658] INFO EnrichedConnectorConfig values:
    config.action.reload = restart
    connector.class = com.mongodb.kafka.connect.MongoSourceConnector
    errors.log.enable = false
    errors.log.include.messages = false
    errors.retry.delay.max.ms = 60000
    errors.retry.timeout = 0
    errors.tolerance = none
    header.converter = null
    key.converter = class org.apache.kafka.connect.json.JsonConverter
    name = mongo-source-shardedDB-mongos
    tasks.max = 1
    transforms = []
    value.converter = class org.apache.kafka.connect.json.JsonConverter
 (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:347)
[2020-06-13 10:32:14,651] INFO [Worker clientId=connect-1, groupId=connect-cluster] Tasks [mongo-source-shardedDB-mongos-0] configs updated (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1409)
[2020-06-13 10:32:14,652] INFO [Worker clientId=connect-1, groupId=connect-cluster] Finished starting connectors and tasks (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1125)
[2020-06-13 10:32:14,653] INFO [Worker clientId=connect-1, groupId=connect-cluster] Handling task config update by restarting tasks [] (org.apache.kafka.connect.runtime.distributed.DistributedHerder:574)
[2020-06-13 10:32:14,653] INFO [Worker clientId=connect-1, groupId=connect-cluster] Rebalance started (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:222)
[2020-06-13 10:32:14,654] INFO [Worker clientId=connect-1, groupId=connect-cluster] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:533)
[2020-06-13 10:32:14,659] INFO [Worker clientId=connect-1, groupId=connect-cluster] Successfully joined group with generation 34 (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:484)
[2020-06-13 10:32:14,659] INFO [Worker clientId=connect-1, groupId=connect-cluster] Joined group at generation 34 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-20f1db32-6c53-4871-bbd7-760a43184de1', leaderUrl='http://10.74.3.142:9083/', offset=42, connectorIds=[mongo-source-shardedDB-mongos, mongo-source-assets-mongos], taskIds=[mongo-source-shardedDB-mongos-0, mongo-source-assets-mongos-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1540)
[2020-06-13 10:32:14,660] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting connectors and tasks using config offset 42 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1104)
[2020-06-13 10:32:14,660] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting task mongo-source-shardedDB-mongos-0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1139)
[2020-06-13 10:32:14,661] INFO Creating task mongo-source-shardedDB-mongos-0 (org.apache.kafka.connect.runtime.Worker:419)
[2020-06-13 10:32:14,661] INFO ConnectorConfig values:
    config.action.reload = restart
    connector.class = com.mongodb.kafka.connect.MongoSourceConnector
    errors.log.enable = false
    errors.log.include.messages = false
    errors.retry.delay.max.ms = 60000
    errors.retry.timeout = 0
    errors.tolerance = none
    header.converter = null
    key.converter = class org.apache.kafka.connect.json.JsonConverter
    name = mongo-source-shardedDB-mongos
    tasks.max = 1
    transforms = []
    value.converter = class org.apache.kafka.connect.json.JsonConverter
 (org.apache.kafka.connect.runtime.ConnectorConfig:347)
[2020-06-13 10:32:14,661] INFO EnrichedConnectorConfig values:
    config.action.reload = restart
    connector.class = com.mongodb.kafka.connect.MongoSourceConnector
    errors.log.enable = false
    errors.log.include.messages = false
    errors.retry.delay.max.ms = 60000
    errors.retry.timeout = 0
    errors.tolerance = none
    header.converter = null
    key.converter = class org.apache.kafka.connect.json.JsonConverter
    name = mongo-source-shardedDB-mongos
    tasks.max = 1
    transforms = []
    value.converter = class org.apache.kafka.connect.json.JsonConverter
 (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:347)
[2020-06-13 10:32:14,662] INFO TaskConfig values:
    task.class = class com.mongodb.kafka.connect.source.MongoSourceTask
 (org.apache.kafka.connect.runtime.TaskConfig:347)
[2020-06-13 10:32:14,662] INFO Instantiated task mongo-source-shardedDB-mongos-0 with version 1.1.0 of type com.mongodb.kafka.connect.source.MongoSourceTask (org.apache.kafka.connect.runtime.Worker:434)
[2020-06-13 10:32:14,662] INFO JsonConverterConfig values:
    converter.type = key
    decimal.format = BASE64
    schemas.cache.size = 1000
    schemas.enable = false
 (org.apache.kafka.connect.json.JsonConverterConfig:347)
[2020-06-13 10:32:14,662] INFO JsonConverterConfig values:
    converter.type = value
    decimal.format = BASE64
    schemas.cache.size = 1000
    schemas.enable = false
 (org.apache.kafka.connect.json.JsonConverterConfig:347)
[2020-06-13 10:32:14,662] INFO Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task mongo-source-shardedDB-mongos-0 using the connector config (org.apache.kafka.connect.runtime.Worker:449)
[2020-06-13 10:32:14,663] INFO Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task mongo-source-shardedDB-mongos-0 using the connector config (org.apache.kafka.connect.runtime.Worker:455)
[2020-06-13 10:32:14,663] INFO Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task mongo-source-shardedDB-mongos-0 using the worker config (org.apache.kafka.connect.runtime.Worker:460)
[2020-06-13 10:32:14,663] INFO Initializing: org.apache.kafka.connect.runtime.TransformationChain{} (org.apache.kafka.connect.runtime.Worker:514)
[2020-06-13 10:32:14,664] INFO ProducerConfig values:
    acks = all
    batch.size = 16384
    bootstrap.servers = [localhost:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = connector-producer-mongo-source-shardedDB-mongos-0
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 2147483647
    enable.idempotence = false
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
    linger.ms = 0
    max.block.ms = 9223372036854775807
    max.in.flight.requests.per.connection = 1
    max.request.size = 25165824
    metadata.max.age.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 2147483647
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
    ssl.endpoint.identification.algorithm = https
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLS
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
 (org.apache.kafka.clients.producer.ProducerConfig:347)
[2020-06-13 10:32:14,667] INFO Kafka version: 2.4.0 (org.apache.kafka.common.utils.AppInfoParser:117)
[2020-06-13 10:32:14,667] INFO Kafka commitId: 77a89fcf8d7fa018 (org.apache.kafka.common.utils.AppInfoParser:118)
[2020-06-13 10:32:14,668] INFO Kafka startTimeMs: 1592069534667 (org.apache.kafka.common.utils.AppInfoParser:119)
[2020-06-13 10:32:14,669] INFO [Worker clientId=connect-1, groupId=connect-cluster] Finished starting connectors and tasks (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1125)
[2020-06-13 10:32:14,671] INFO Cluster created with settings {hosts=[10.74.3.104:27017], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms', maxWaitQueueSize=500} (org.mongodb.driver.cluster:71)
[2020-06-13 10:32:14,677] INFO Opened connection [connectionId{localValue:26}] to 10.74.3.104:27017 (org.mongodb.driver.connection:71)
[2020-06-13 10:32:14,678] INFO Monitor thread successfully connected to server with description ServerDescription{address=10.74.3.104:27017, type=SHARD_ROUTER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 6, 8]}, minWireVersion=0, maxWireVersion=6, maxDocumentSize=16777216, logicalSessionTimeoutMinutes=30, roundTripTimeNanos=944266} (org.mongodb.driver.cluster:71)
[2020-06-13 10:32:14,770] INFO [Producer clientId=connector-producer-mongo-source-shardedDB-mongos-0] Cluster ID: qOW9ycN1RUWenv2HrwUADQ (org.apache.kafka.clients.Metadata:261)
[2020-06-13 10:32:15,652] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:15,658] INFO Opened connection [connectionId{localValue:27}] to 10.74.3.104:27017 (org.mongodb.driver.connection:71)
[2020-06-13 10:32:16,668] INFO Copying existing data on the following namespaces: [shardedDB.shardedCollection] (com.mongodb.kafka.connect.source.MongoCopyDataManager:82)
[2020-06-13 10:32:16,669] INFO WorkerSourceTask{id=mongo-source-shardedDB-mongos-0} Source task finished initialization and start (org.apache.kafka.connect.runtime.WorkerSourceTask:209)
[2020-06-13 10:32:21,778] INFO Creating topic test.shardedDB.shardedCollection with configuration {} and initial partition assignment HashMap(0 -> ArrayBuffer(3)) (kafka.zk.AdminZkClient)
[2020-06-13 10:32:21,783] INFO [KafkaApi-1] Auto creation of topic test.shardedDB.shardedCollection with 1 partitions and replication factor 1 is successful (kafka.server.KafkaApis)
[2020-06-13 10:32:21,784] WARN [Producer clientId=connector-producer-mongo-source-shardedDB-mongos-0] Error while fetching metadata with correlation id 3 : {test.shardedDB.shardedCollection=LEADER_NOT_AVAILABLE} (org.apache.kafka.clients.NetworkClient:1063)
[2020-06-13 10:32:24,669] INFO WorkerSourceTask{id=mongo-source-shardedDB-mongos-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:416)
[2020-06-13 10:32:24,670] INFO WorkerSourceTask{id=mongo-source-shardedDB-mongos-0} flushing 8365 outstanding messages for offset commit (org.apache.kafka.connect.runtime.WorkerSourceTask:433)
[2020-06-13 10:32:28,337] INFO WorkerSourceTask{id=mongo-source-shardedDB-mongos-0} Finished commitOffsets successfully in 3668 ms (org.apache.kafka.connect.runtime.WorkerSourceTask:515)
[2020-06-13 10:32:28,677] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:28,678] INFO Resuming the change stream after the previous offset (com.mongodb.kafka.connect.source.MongoSourceTask:234)
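Up to this point everything looks healthy: the LEADER_NOT_AVAILABLE warning is the usual transient metadata error while a topic is being auto-created, and the copy phase completes, flushing 8365 records. The copied data can be sanity-checked with the stock Kafka CLI tools (broker address assumed to match the producer config above):

kafka-topics.sh --bootstrap-server localhost:9092 --describe --topic test.shardedDB.shardedCollection
kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test.shardedDB.shardedCollection --from-beginning --max-messages 5

The problem starts when the task tries to resume the change stream after the copy: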
[2020-06-13 10:32:28,679] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:28,680] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:28,682] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "cdb1b0158f42afb2e4513ea7dccd0952", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
[2020-06-13 10:32:33,688] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:33,689] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:33,690] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "cdb1b0158f42afb2e4513ea7dccd0952", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
[2020-06-13 10:32:34,690] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:34,691] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:34,692] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "cdb1b0158f42afb2e4513ea7dccd0952", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
[2020-06-13 10:32:38,699] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:38,699] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:38,701] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "cdb1b0158f42afb2e4513ea7dccd0952", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
[2020-06-13 10:32:39,701] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:39,702] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:39,703] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "cdb1b0158f42afb2e4513ea7dccd0952", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
[2020-06-13 10:32:41,945] INFO WorkerSourceTask{id=mongo-source-shardedDB-mongos-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:416)
[2020-06-13 10:32:41,945] INFO WorkerSourceTask{id=mongo-source-shardedDB-mongos-0} flushing 0 outstanding messages for offset commit (org.apache.kafka.connect.runtime.WorkerSourceTask:433)
[2020-06-13 10:32:42,947] INFO WorkerSourceTask{id=mongo-source-shardedDB-mongos-0} Finished commitOffsets successfully in 1002 ms (org.apache.kafka.connect.runtime.WorkerSourceTask:515)
[2020-06-13 10:32:43,709] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:43,710] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:43,711] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "a014a718967594b1c9c192dcc5c0cf62", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
[2020-06-13 10:32:44,711] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:44,712] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:44,713] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "a014a718967594b1c9c192dcc5c0cf62", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
[2020-06-13 10:32:48,718] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:48,719] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:48,721] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "a014a718967594b1c9c192dcc5c0cf62", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
[2020-06-13 10:32:49,912] INFO Watching for collection changes on 'shardedDB.shardedCollection' (com.mongodb.kafka.connect.source.MongoSourceTask:374)
[2020-06-13 10:32:49,913] INFO Resuming the change stream after the previous offset using resumeAfter (com.mongodb.kafka.connect.source.MongoSourceTask:237)
[2020-06-13 10:32:49,915] INFO Failed to resume change stream: Bad resume token: _data of missing or of wrong type{_id: "a014a718967594b1c9c192dcc5c0cf62", copyingData: true} 40647 (com.mongodb.kafka.connect.source.MongoSourceTask:253)
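The offset the task keeps retrying, {_id: "...", copyingData: true}, appears to be the copy-in-progress marker that the connector commits while copy.existing runs, and the 3.6.8 mongos rejects it as a change-stream resume token (server error 40647, "Bad resume token: _data of missing or of wrong type"), so the task loops on Watching/Resuming/Failed indefinitely. To see exactly what was committed, the worker's offset topic can be dumped, assuming the default distributed-mode topic name connect-offsets (substitute whatever offset.storage.topic is set to in the worker config):

kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic connect-offsets --from-beginning --property print.key=true

Since source offsets are keyed by connector name, one blunt workaround while testing is to delete the connector and re-register it under a new name so it starts with no stored offset:

curl -X DELETE localhost:9083/connectors/mongo-source-shardedDB-mongos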