<!-- 
RSS generated by JIRA (9.7.1#970001-sha1:2222b88b221c4928ef0de3161136cc90c8356a66) at Thu Feb 08 06:07:34 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>MongoDB Jira</title>
    <link>https://jira.mongodb.org</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.7.1</version>
        <build-number>970001</build-number>
        <build-date>13-04-2023</build-date>
    </build-info>


<item>
            <title>[SERVER-67217] High query response on primary (3-4 sec range) in mongo 4.2.20 version </title>
                <link>https://jira.mongodb.org/browse/SERVER-67217</link>
                <project id="10000" key="SERVER">Core Server</project>
                    <description>&lt;p&gt;&lt;b&gt;Scenario&lt;/b&gt; - We run tests with same load on 2 mongo versions(4.0.27,4.2.20).&lt;br/&gt;
We found high query response on primary(in range of 3-4 seconds) for 4.2.20 version while found no issue with 4.0.27.&lt;/p&gt;

&lt;p&gt;&lt;b&gt;High query response issue start timing&lt;/b&gt;: 2022-06-10 20:03 UTC&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Few high query response messages from primary:&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;2022-06-10T20:03:52.016+0000 I &#160;WRITE &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn425&amp;#93;&lt;/span&gt; update drasessions_1.drasessions command: { q: { _id: &lt;/p&gt;
{ sessionid: &quot;ClpGx0:172.16.241.111:5021:1654866656:0002395073&quot; }
&lt;p&gt;, ts: { $lte: 1654891428485 }, srk: &quot;serverb1.relay&quot; }, u: { _id: &lt;/p&gt;
{ sessionid: &quot;ClpGx0:172.16.241.111:5021:1654866656:0002395073&quot; }
&lt;p&gt;, ts: 1654891428485, srk: &quot;serverb1.relay&quot;, originHost: &quot;site-b-client-calipers21-gx.pcef.gx1&quot;, originRealm: &quot;client-consumer.calipers.pcef.gx&quot;, destHost: &quot;site-b-server-calipers81-gx.pcef.b-gx-b1.2&quot;, destRealm: &quot;server-consumer.calipers.pcef.gx&quot;, apn: &quot;ims.com&quot;, imsi: &quot;320000000295072&quot;, ipv6: &quot;3101:0000:0004:809f&quot;, msisdn: &quot;420000000295072&quot;, nextEvalTime: new Date(1654981428485), staleSessionExpiryCount: 3, staleBindingRefreshTime: new Date(1656619428486), sessionid: &quot;ClpGx0:172.16.241.111:5021:1654866656:0002395073&quot;, systemId: &quot;vpas-system-2&quot;, uuid: &quot;vpas-system-21357665800&quot; }, multi: false, upsert: true } planSummary: IXSCAN { _id: 1 } keysExamined:0 docsExamined:0 nMatched:0 nModified:0 upsert:1 keysInserted:2 numYields:0 queryHash:1380E850 planCacheKey:9313C0BA locks:{ ParallelBatchWriterMode: { acquireCount: &lt;/p&gt;
{ r: 1 }
&lt;p&gt; }, ReplicationStateTransition: { acquireCount: &lt;/p&gt;
{ w: 1 }
&lt;p&gt; }, Global: { acquireCount: &lt;/p&gt;
{ w: 1 }
&lt;p&gt; }, Database: { acquireCount: &lt;/p&gt;
{ w: 1 }
&lt;p&gt; }, Collection: { acquireCount: &lt;/p&gt;
{ w: 1 }
&lt;p&gt; }, Mutex: { acquireCount: &lt;/p&gt;
{ r: 2 }
&lt;p&gt; } } flowControl:{ acquireCount: 1 } storage:{} &lt;b&gt;3530ms&lt;/b&gt;&lt;br/&gt;
2022-06-10T20:03:52.016+0000 I &#160;COMMAND &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn52&amp;#93;&lt;/span&gt; command ipv6bindings_1.ipv6bindings command: find { find: &quot;ipv6bindings&quot;, filter: &lt;/p&gt;
{ _id: &quot;3101:0000:0009:11c1&quot; }
&lt;p&gt;, limit: 1, singleBatch: true, $db: &quot;ipv6bindings_1&quot;, $clusterTime: { clusterTime: Timestamp(1654891428, 246), signature: &lt;/p&gt;
{ hash: BinData(0, 1AE068B7041E0DF364A48B3B76192706D425B50D), keyId: 7072624733400858628 }
&lt;p&gt; }, lsid: { id: UUID(&quot;55caafc8-fbb1-4ede-a1d8-dd788fadb341&quot;) }, $readPreference: { mode: &quot;nearest&quot; } } planSummary: IDHACK keysExamined:1 docsExamined:1 cursorExhausted:1 numYields:1 nreturned:1 reslen:520 locks:{ ReplicationStateTransition: { acquireCount: &lt;/p&gt;
{ w: 2 }
&lt;p&gt; }, Global: { acquireCount: &lt;/p&gt;
{ r: 2 }
&lt;p&gt; }, Database: { acquireCount: &lt;/p&gt;
{ r: 2 }
&lt;p&gt; }, Collection: { acquireCount: &lt;/p&gt;
{ r: 2 }
&lt;p&gt; }, Mutex: { acquireCount: &lt;/p&gt;
{ r: 1 }
&lt;p&gt; } } storage:{} protocol:op_msg &lt;b&gt;3441ms&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Attached files:&lt;/b&gt;&#160;&lt;br/&gt;
1) &lt;b&gt;rs.status.txt:&lt;/b&gt; contains ** replica set status(attached with the ticket)&lt;br/&gt;
2) &lt;b&gt;&lt;font color=&quot;#FF0000&quot;&gt;mongo-diag-log.tar.gz(need to upload)&lt;/font&gt;: kindly provide support portal link to upload&lt;/b&gt; (contains mongod.logs and diagnostic.data for all data members) file structure is given below:&lt;br/&gt;
root@xyz:~$ sudo tar -tf mongo-diag-log.tar.gz&lt;br/&gt;
mongo-27029_PRIMARY_2a.tar.gz&lt;br/&gt;
mongo-27029-SECONDARY_2b.tar.gz&lt;br/&gt;
mongo-27029-SECONDARY_8a.tar.gz&lt;br/&gt;
mongo-27029-SECONDARY_8b.tar.gz&lt;br/&gt;
root@xyz:~$ tar -tf mongo-27029_PRIMARY_2a.tar.gz&lt;br/&gt;
mongo-27029_PRIMARY.log&lt;br/&gt;
diagnostic.data/&lt;br/&gt;
diagnostic.data/metrics.interim&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T14-51-15Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T22-51-19Z-00000&lt;br/&gt;
root@xyz~$ sudo tar -tf mongo-27029-SECONDARY_2b.tar.gz&lt;br/&gt;
mongo-27029-SECONDARY_2b.log&lt;br/&gt;
diagnostic.data/&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T21-26-19Z-00000&lt;br/&gt;
diagnostic.data/metrics.interim&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T04-56-24Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-11T05-01-19Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T13-51-31Z-00000&lt;br/&gt;
root@xyz:~$ sudo tar -tf mongo-27029-SECONDARY_8a.tar.gz&lt;br/&gt;
mongo-27029-SECONDARY_8a.log&lt;br/&gt;
diagnostic.data/&lt;br/&gt;
diagnostic.data/metrics.2022-06-09T18-40-08Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T03-25-08Z-00000&lt;br/&gt;
diagnostic.data/metrics.interim&lt;br/&gt;
diagnostic.data/metrics.2022-06-09T01-21-07Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T13-01-28Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T22-36-19Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-09T10-00-08Z-00000&lt;br/&gt;
root@xyz:~$ sudo tar -tf mongo-27029-SECONDARY_8b.tar.gz&lt;br/&gt;
mongo-27029-SECONDARY_8b.log&lt;br/&gt;
diagnostic.data/&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T20-21-19Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T10-56-28Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-11T06-06-19Z-00000&lt;br/&gt;
diagnostic.data/metrics.interim&lt;br/&gt;
diagnostic.data/metrics.2022-06-10T01-50-08Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-09T06-09-14Z-00000&lt;br/&gt;
diagnostic.data/metrics.2022-06-09T15-55-08Z-00000&lt;/p&gt;</description>
                <environment>java client driver on client side - 3.12.9&lt;br/&gt;
mongo version - 4.2.20&lt;br/&gt;
Storage Engine - WT&lt;br/&gt;
Storage Type - tmpfs &lt;br/&gt;
Replica-set : 7 members (4 non-arbiter and 3 arbiter , all voting members)&lt;br/&gt;
One of the member CMD as an example:&lt;br/&gt;
mongod --keyFile=/mongodb.key --enableMajorityReadConcern false --ipv6 --bind_ip_all --port 27021 --dbpath=/data/db/wt-27021 --replSet rs-shard-2 --quiet --slowms 500 --logpath /data/db/mongo-27021.log --setParameter diagnosticDataCollectionEnabled=true --logappend --oplogSize 3221 --logRotate reopen --wiredTigerCacheSizeGB 4.40</environment>
        <key id="2066453">SERVER-67217</key>
            <summary>High query response on primary (3-4 sec range) in mongo 4.2.20 version </summary>
                <type id="1" iconUrl="https://jira.mongodb.org/secure/viewavatar?size=xsmall&amp;avatarId=14703&amp;avatarType=issuetype">Bug</type>
                                            <priority id="3" iconUrl="https://jira.mongodb.org/images/icons/priorities/major.svg">Major - P3</priority>
                        <status id="6" iconUrl="https://jira.mongodb.org/images/icons/statuses/closed.png" description="The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.">Closed</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="9">Done</resolution>
                                        <assignee username="chris.kelly@mongodb.com">Chris Kelly</assignee>
                                    <reporter username="kg3634@gmail.com">KAPIL GUPTA</reporter>
                        <labels>
                            <label>Bug</label>
                    </labels>
                <created>Sat, 11 Jun 2022 07:48:57 +0000</created>
                <updated>Wed, 18 Jan 2023 06:28:23 +0000</updated>
                            <resolved>Thu, 1 Sep 2022 19:24:39 +0000</resolved>
                                    <version>4.2.20</version>
                                                    <component>Performance</component>
                    <component>Querying</component>
                    <component>WiredTiger</component>
                                        <votes>9</votes>
                                    <watches>7</watches>
                                                                                                                <comments>
                            <comment id="5072575" author="JIRAUSER1264730" created="Thu, 22 Dec 2022 13:42:01 +0000"  >&lt;p&gt;Hi Christopher,&lt;/p&gt;

&lt;p&gt;We tested with 4.2 and 4.4 and again we found same issue on 4.2 member.&lt;/p&gt;

&lt;p&gt;This time we also collected disk read write but we did not find any high disk usage.&lt;/p&gt;

&lt;p&gt;Kindly reopen the case and please provide link to upload the supported logs.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4796201" author="JIRAUSER1265262" created="Thu, 1 Sep 2022 19:17:56 +0000"  >&lt;p&gt;Hi Kapil,&lt;/p&gt;

&lt;p&gt;Thank you for your patience. We have looked through your data and have not found information that implicates the mongod for your latency issues. Unfortunately, at this time that means we&apos;re unable to determine the cause of the behavior from the given information.&lt;/p&gt;

&lt;p&gt;Some points that were interesting:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;In all cases you presented, there is high, unexplained disk usage (not from the mongod) that eventually leads the mongod to start queueing for a few seconds until it resolves itself. This is unexplained because it does not correlate with activity in the journal (wt log) or the block manager.&#160;
	&lt;ul&gt;
		&lt;li&gt;Point A: 2022-07-05T00:45:48.308Z&#160;&lt;br/&gt;
  &lt;span class=&quot;image-wrap&quot; style=&quot;&quot;&gt;&lt;img src=&quot;https://jira.mongodb.org/secure/attachment/399472/399472_image-2022-09-01-15-15-42-817.png&quot; width=&quot;100%&quot; style=&quot;border: 0px solid black&quot; /&gt;&lt;/span&gt;&lt;/li&gt;
	&lt;/ul&gt;
	&lt;/li&gt;
	&lt;li&gt;The data you provided comparing 4.0 and 4.2 was mentioned as being in two separate deployments (&quot;sites&quot; in your case), so it is possible there are differences between these two that could explain the issue&lt;/li&gt;
&lt;/ul&gt;


&lt;ul&gt;
	&lt;li&gt;In your 4.2 data, I see a cyclic high disk usage every 15 minutes from some unexplained process that leads to the mongod queueing. I don&apos;t see this on your 4.0 data (and I see multiple extra drives on your 4.0 instance, which further suggests your sites vary in some other way that can explain the problem)&lt;br/&gt;
 &lt;span class=&quot;image-wrap&quot; style=&quot;&quot;&gt;&lt;img src=&quot;https://jira.mongodb.org/secure/attachment/399468/399468_image-2022-09-01-15-09-05-589.png&quot; width=&quot;100%&quot; style=&quot;border: 0px solid black&quot; /&gt;&lt;/span&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;I would suggest an investigation into what may be occurring on this system to cause this, since it is not evident that it is coming from the mongod. As it stands I am going to close this because it seems something else is doing this every 15 minutes.&lt;/p&gt;

&lt;p&gt;If you like, you can take this discussion further&#160; by asking our community for help by posting on the &lt;a href=&quot;https://www.mongodb.com/community/forums&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;MongoDB Developer Community Forums&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;If the discussion there leads you to suspect a bug in the MongoDB server, then we&apos;d want to investigate it as a possible bug here in the SERVER project. Specifically in this case, that would also entail figuring out the source of your disk activity here.&lt;/p&gt;

&lt;p&gt;Regards,&lt;br/&gt;
Christopher&lt;/p&gt;</comment>
                            <comment id="4752265" author="JIRAUSER1264730" created="Tue, 16 Aug 2022 01:20:11 +0000"  >&lt;p&gt;Hi Christopher,&lt;/p&gt;

&lt;p&gt;Just a gentle reminder! please have a look.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Regards,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4730625" author="JIRAUSER1264730" created="Fri, 5 Aug 2022 10:45:21 +0000"  >&lt;p&gt;Thanks much Christopher for quick response and providing me the link.&lt;/p&gt;

&lt;p&gt;I have uploaded the&#160;&lt;b&gt;combinedmongologs.tar.gz&lt;/b&gt; in the portal. Kindly let us know your observation.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Regards,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4730598" author="JIRAUSER1265262" created="Fri, 5 Aug 2022 10:14:16 +0000"  >&lt;p&gt;Hi Kapil,&lt;/p&gt;

&lt;p&gt;I&apos;ve gone ahead and created another &lt;a href=&quot;https://amphora.corp.mongodb.com/public/upload/eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJmb2xkZXJfaWQiOiIxNjkxMTcwNTc5NTAiLCJleHAiOjE2NjIyODU3MTl9.lBsAxgQWHubm3HgcUJr7Y-wrNzg6BrdgFrD2OUhmOUc&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;Amphora Portal link &lt;/a&gt;for you to upload to.&#160;&lt;/p&gt;

&lt;p&gt;As for not being able to collect gdb, I would also suggest the SIGUSR2 signal but I believe this is only supported in newer versions of MongoDB. The backtrace would clarify what is actually being worked on, so without it I&apos;ll have to infer what bad behavior is going on. I can take another look at your next set of logs at the timestamp you specified and see if I can discern something interesting.&lt;/p&gt;

&lt;p&gt;Christopher&lt;/p&gt;</comment>
                            <comment id="4730422" author="JIRAUSER1264730" created="Fri, 5 Aug 2022 07:47:20 +0000"  >&lt;p&gt;Hi Christopher,&lt;/p&gt;

&lt;p&gt;We did testing again and got a total of 3 occurrences (collected logs for 2) of the issue.&lt;/p&gt;

&lt;p&gt;This time, we collected additional logs also (top output, journalctl logs and dmesg, mongotop and mongostat).&lt;/p&gt;

&lt;p&gt;Please find the details for the logs along with observation given below:&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Occurrence 1:&lt;/b&gt; mongo server did not respond for &#160;Aug &#160;3 14:45:36 timestamp which created multiple problems(high query response in next few second as client side it got disconnected message and it tried to reconnect which created pressure on mongo which increases cpu on mongo)&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;supported logs1:&lt;/b&gt; in mongo logs(name: &lt;b&gt;mongo-27045.log.issue1.gz&lt;/b&gt;), we observed that the 3 timestamps (14:45:35,14:45:36,14:45:37) were not in order; 14:45:37 came before 14:45:35 and 14:45:36 in some instances, as given below:&lt;br/&gt;
2022-08-03T&lt;b&gt;14:45:35&lt;/b&gt;.971+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn283&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key: 
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_1.imsiApnbindings&quot; }&lt;br/&gt;
2022-08-03T&lt;b&gt;14:45:37&lt;/b&gt;.553+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn283&amp;#93;&lt;/span&gt; Only one plan is available; it will be run but will not be cached. ns: imsiApnbindings_1.imsiApnbindings query: &lt;/p&gt;
&lt;div class=&quot;error&quot;&gt;&lt;span class=&quot;error&quot;&gt;Unknown macro: { _id}&lt;/span&gt; &lt;/div&gt;
&lt;p&gt; sort: {} projection: {}, planSummary: IXSCAN { _id: 1 }&lt;br/&gt;
2022-08-03T14:45:37.553+0000 I &#160;NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn81&amp;#93;&lt;/span&gt; end connection &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:ae&amp;#93;&lt;/span&gt;:54464 (748 connections now open)&lt;br/&gt;
2022-08-03T14:45:37.553+0000 I &#160;NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn19&amp;#93;&lt;/span&gt; end connection &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:bf&amp;#93;&lt;/span&gt;:46106 (747 connections now open)&lt;br/&gt;
2022-08-03T14:45:35.972+0000 I &#160;COMMAND &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn43&amp;#93;&lt;/span&gt; command admin.$cmd command: isMaster &lt;/p&gt;
{ ismaster: 1, $db: &quot;admin&quot;, $clusterTime:
Unknown macro: \{ clusterTime}
&lt;p&gt;} numYields:0 reslen:1072 locks:{} protocol:op_msg 0ms&lt;br/&gt;
2022-08-03T14:45:35.972+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn1093&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key:&lt;/p&gt;
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_2.imsiApnbindings&quot; }&lt;br/&gt;
2022-08-03T14:45:35.973+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn1034&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key:&lt;/p&gt;
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_2.imsiApnbindings&quot; }&lt;br/&gt;
2022-08-03T14:45:35.974+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn1146&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key:&lt;/p&gt;
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_2.imsiApnbindings&quot; }&lt;/p&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;2022-08-03T&lt;b&gt;14:45:37.&lt;/b&gt;566+0000 I &#160;NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn66&amp;#93;&lt;/span&gt; end connection &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:c2&amp;#93;&lt;/span&gt;:35400 (737 connections now open)&lt;br/&gt;
2022-08-03T&lt;b&gt;14:45:36&lt;/b&gt;.001+0000 I &#160;COMMAND &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn21&amp;#93;&lt;/span&gt; command admin.$cmd command: isMaster { ismaster: 1, $db: &quot;admin&quot;, $clusterTime: { clusterTime: Timestamp(1659537935, 1283), signature:&lt;/p&gt;

{ hash: BinData(0, 04438EF10586291946AB881A538C20917AA3EDB7), keyId: 7126121256901935109 }

&lt;p&gt;} } numYields:0 reslen:1072 locks:{} protocol:op_msg 0ms&lt;br/&gt;
2022-08-03T14:45:35.998+0000 I &#160;COMMAND &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn46&amp;#93;&lt;/span&gt; command admin.$cmd command: isMaster { ismaster: 1, $db: &quot;admin&quot;, $clusterTime: { clusterTime: Timestamp(1659537935, 987), signature:&lt;/p&gt;

{ hash: BinData(0, 04438EF10586291946AB881A538C20917AA3EDB7), keyId: 7126121256901935109 }

&lt;p&gt;} } numYields:0 reslen:1072 locks:{} protocol:op_msg 0ms&lt;br/&gt;
2022-08-03T14:45:36.001+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn1408&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key:&lt;/p&gt;

{ _id: 1 }

&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_2.imsiApnbindings&quot; }&lt;br/&gt;
2022-08-03T14:45:36.005+0000 I &#160;COMMAND &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn45&amp;#93;&lt;/span&gt; command admin.$cmd command: isMaster { ismaster: 1, $db: &quot;admin&quot;, $clusterTime: { clusterTime: Timestamp(1659537935, 1158), signature:&lt;/p&gt;

{ hash: BinData(0, 04438EF10586291946AB881A538C20917AA3EDB7), keyId: 7126121256901935109 }

&lt;p&gt;} } numYields:0 reslen:1072 locks:{} protocol:op_msg 0ms&lt;br/&gt;
2022-08-03T14:45:37.566+0000 I &#160;NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn46&amp;#93;&lt;/span&gt; end connection&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;supported logs2&lt;/b&gt;: name: &lt;b&gt;mongostat27045.txt&lt;/b&gt;: 14:45:36 timestamp is missing&lt;br/&gt;
insert query update delete getmore command dirty &#160;used flushes vsize &#160; res &#160; qrw &#160; arw net_in net_out conn &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; set repl &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;time&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 4 &#160; &#160;749 &#160; &#160;715 &#160; &#160; &#160;25 &#160; 147|0 &#160;1.7% 77.1% &#160; &#160; &#160; 0 6.07G 3.54G &#160; 0|1 &#160; 1|3 &#160; 847k &#160; 1.98m &#160;764 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:29.419&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 5 &#160; &#160;633 &#160; &#160;795 &#160; &#160; &#160;27 &#160; 149|0 &#160;1.7% 77.2% &#160; &#160; &#160; 0 6.07G 3.54G &#160; 0|0 &#160; 1|0 &#160; 792k &#160; 2.16m &#160;764 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:30.414&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 8 &#160; &#160;747 &#160; &#160;862 &#160; &#160; &#160;26 &#160; 150|0 &#160;1.7% 77.2% &#160; &#160; &#160; 0 6.07G 3.54G &#160; 0|0 &#160; 1|0 &#160; 900k &#160; 2.32m &#160;764 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:31.416&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 5 &#160; &#160;789 &#160; &#160;721 &#160; &#160; &#160;26 &#160; 175|0 &#160;1.8% 77.2% &#160; &#160; &#160; 0 6.07G 3.54G &#160; 0|0 &#160; 1|3 &#160; 881k &#160; 2.26m &#160;764 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:32.418&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 3 &#160; &#160;763 &#160; &#160;734 &#160; &#160; &#160;25 &#160; 149|0 &#160;1.8% 77.2% &#160; &#160; &#160; 0 6.07G 3.54G &#160; 0|1 &#160; 1|1 &#160; 856k &#160; 2.11m &#160;764 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:33.422&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 3 &#160; &#160;535 &#160; &#160;587 &#160; &#160; &#160;27 &#160; 150|0 &#160;1.8% 77.3% &#160; &#160; &#160; 0 6.07G 3.55G &#160; 0|0 &#160; 1|1 &#160; 653k &#160; 1.80m &#160;764 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:34.416&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 4 &#160; &#160;782 &#160; &#160;708 &#160; &#160; &#160;26 &#160; 186|0 &#160;1.8% 77.3% &#160; &#160; &#160; 0 6.07G 3.55G &#160; 0|1 &#160; 1|0 &#160; 869k &#160; 2.19m &#160;764 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:35.415&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 4 &#160; &#160;360 &#160; &#160;298 &#160; &#160; &#160; 7 &#160; &#160;65|0 &#160;1.8% 77.3% &#160; &#160; &#160; 0 6.07G 3.55G 0|391 5|128 &#160; 382k &#160; &#160;585k &#160;764 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:37.579&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 4 &#160; &#160;136 &#160; &#160;113 &#160; &#160; &#160;18 &#160;1287|0 &#160;1.9% 77.3% &#160; &#160; &#160; 0 5.83G 3.60G &#160; 0|4 &#160;1|53 &#160; 523k &#160; 4.22m &#160;490 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:37.798&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 6 &#160; 1378 &#160; &#160;646 &#160; &#160; &#160;19 &#160;3142|0 &#160;1.9% 77.3% &#160; &#160; &#160; 0 6.03G 3.62G &#160; 0|0 &#160;1|43 &#160;1.79m &#160; 4.92m &#160;724 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 14:45:38.524&lt;/li&gt;
	&lt;li&gt;&lt;b&gt;supported logs3:&lt;/b&gt; name: &lt;b&gt;mongotop27045.txt&lt;/b&gt;: 14:45:36 timestamp is missing&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;ns &#160; &#160;total &#160; &#160;read &#160; &#160;write &#160; &#160;2022-08-03T14:45:34Z&lt;br/&gt;
imsiApnbindings_1.imsiApnbindings &#160; &#160;948ms &#160; &#160; 0ms &#160; &#160;948ms&lt;br/&gt;
imsiApnbindings_2.imsiApnbindings &#160; &#160;801ms &#160; &#160; 0ms &#160; &#160;800ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;local.oplog.rs &#160; &#160; 13ms &#160; &#160;13ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; admin.system.keys &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.roles &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.users &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.version &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160;config.system.sessions &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; config.transactions &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; imsiApnbindings.imsiApnbindings &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;/p&gt;

&lt;p&gt;&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;ns &#160; &#160;total &#160; &#160;read &#160; &#160;write &#160; &#160;2022-08-03T14:45:37Z&lt;br/&gt;
imsiApnbindings_2.imsiApnbindings &#160; &#160;644ms &#160; &#160; 0ms &#160; &#160;644ms&lt;br/&gt;
imsiApnbindings_1.imsiApnbindings &#160; &#160;600ms &#160; &#160; 0ms &#160; &#160;600ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;local.oplog.rs &#160; &#160; 18ms &#160; &#160;18ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; admin.system.keys &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.roles &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.users &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.version &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160;config.system.sessions &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; config.transactions &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; imsiApnbindings.imsiApnbindings &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;/p&gt;

&lt;p&gt;&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;ns &#160; &#160; &#160; total &#160; &#160; &#160;read &#160; &#160; &#160; write &#160; &#160;2022-08-03T14:45:38Z&lt;br/&gt;
&#160; imsiApnbindings_2.imsiApnbindings &#160; &#160;458048ms &#160; &#160;1452ms &#160; &#160;456595ms&lt;br/&gt;
&#160; imsiApnbindings_1.imsiApnbindings &#160; &#160;410508ms &#160; &#160;5351ms &#160; &#160;405157ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;local.oplog.rs &#160; &#160; &#160; &#160;25ms &#160; &#160; &#160;25ms &#160; &#160; &#160; &#160; 0ms&lt;br/&gt;
msisdnApnbindings.msisdnApnbindings &#160; &#160; &#160; &#160; 8ms &#160; &#160; &#160; 8ms &#160; &#160; &#160; &#160; 0ms&lt;br/&gt;
&#160; &#160; imsiApnbindings.imsiApnbindings &#160; &#160; &#160; &#160; 6ms &#160; &#160; &#160; 6ms &#160; &#160; &#160; &#160; 0ms&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;supported logs3:&lt;/b&gt; we executed one script which collects the number of threads and number of file descriptors on the mongo primary port. Script details are given below:&lt;br/&gt;
#!/bin/bash&lt;br/&gt;
while true;do&lt;br/&gt;
&#160;printf &apos;%s &lt;del&gt;&amp;gt; %s\n&apos; &quot;$(date &apos;+%Y&lt;/del&gt;%m-%d %H:%M:%S:%3N&apos;)&quot; &quot;$(ps hH p 27401 | wc -l &amp;amp;&amp;amp; ls -l /proc/27401/fd | wc -l)&quot; &amp;gt;&amp;gt; thread.txt 2&amp;gt;&amp;amp;1&lt;br/&gt;
&#160;sleep 1;&lt;br/&gt;
done&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;output file name: &lt;b&gt;thread.txt&lt;/b&gt;: 14:45:37 timestamp is missing&lt;/p&gt;

&lt;p&gt;2022-08-03 14:45:34 -&amp;gt; 818&lt;br/&gt;
1628&lt;br/&gt;
2022-08-03 14:45:35 -&amp;gt; 820&lt;br/&gt;
1630&lt;br/&gt;
2022-08-03 14:45:36 -&amp;gt; 820&lt;br/&gt;
1632&lt;br/&gt;
2022-08-03 14:45:38 -&amp;gt; 838&lt;br/&gt;
1668&lt;br/&gt;
2022-08-03 14:45:39 -&amp;gt; 1412&lt;br/&gt;
2814&lt;br/&gt;
2022-08-03 14:45:40 -&amp;gt; 1412&lt;br/&gt;
2814&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;supported logs4:&lt;/b&gt; name: &lt;b&gt;top_vm_output.log&lt;/b&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;top - 14:45:36 up 56 min, &#160;1 user, &#160;load average: 9.93, 8.67, 7.39&lt;br/&gt;
Tasks: 379 total, &#160; 1 running, 285 sleeping, &#160; 0 stopped, &#160; 0 zombie&lt;br/&gt;
%Cpu(s): 30.0 us, &#160;8.3 sy, &#160;0.0 ni, 40.0 id, 18.9 wa, &#160;0.0 hi, &#160;2.9 si, &#160;0.0 st&lt;br/&gt;
KiB Mem : 65969880 total, 36489024 free, 17296064 used, 12184792 buff/cache&lt;br/&gt;
KiB Swap: &#160;1046524 total, &#160;1046524 free, &#160; &#160; &#160; &#160;0 used. 40957040 avail Mem&lt;/p&gt;

&lt;p&gt;&#160; PID USER &#160; &#160; &#160;PR &#160;NI &#160; &#160;VIRT &#160; &#160;RES &#160; &#160;SHR S &#160;%CPU %MEM &#160; &#160; TIME+ COMMAND&lt;br/&gt;
27401 root &#160; &#160; &#160; 5 -15 6367796 3.549g &#160;40296 S &#160;65.2 &#160;5.6 &#160;40:41.11 mongod&lt;br/&gt;
&#160;3570 root &#160; &#160; &#160;20 &#160; 0 &#160; &#160;4792 &#160; 1656 &#160; 1372 D &#160;55.3 &#160;0.0 &#160; 0:28.48 gzip&lt;br/&gt;
13775 root &#160; &#160; &#160; 5 -15 5629592 3.483g &#160;39692 S &#160;14.9 &#160;5.5 &#160;11:11.79 mongod&lt;br/&gt;
13792 root &#160; &#160; &#160; 5 -15 5612844 3.473g &#160;39568 S &#160;14.9 &#160;5.5 &#160;11:35.67 mongod&lt;br/&gt;
13800 root &#160; &#160; &#160; 5 -15 5648408 3.509g &#160;39656 S &#160;14.6 &#160;5.6 &#160;11:06.83 mongod&lt;br/&gt;
&#160;1369 root &#160; &#160; &#160;20 &#160; 0 2909596 101380 &#160;52392 S &#160; 9.9 &#160;0.2 &#160; 8:23.87 dockerd&lt;br/&gt;
13802 root &#160; &#160; &#160; 5 -15 1976632 138672 &#160;38204 S &#160; 2.3 &#160;0.2 &#160; 1:17.86 mongod&lt;br/&gt;
&#160;8176 root &#160; &#160; &#160;20 &#160; 0 &#160;687072 &#160;16964 &#160; 9080 S &#160; 2.0 &#160;0.0 &#160; 0:42.09 node_exporter&lt;br/&gt;
13284 root &#160; &#160; &#160;20 &#160; 0 &#160;183120 &#160;59008 &#160;41724 S &#160; 2.0 &#160;0.1 &#160; 1:11.44 consul&lt;br/&gt;
&#8211;&lt;br/&gt;
top - 14:45:40 up 56 min, &#160;1 user, &#160;load average: 9.78, 8.66, 7.39&lt;br/&gt;
Tasks: 381 total, &#160; 3 running, 283 sleeping, &#160; 0 stopped, &#160; 0 zombie&lt;br/&gt;
%Cpu(s): 39.0 us, 12.1 sy, &#160;0.0 ni, 33.2 id, 10.3 wa, &#160;0.0 hi, &#160;5.5 si, &#160;0.0 st&lt;br/&gt;
KiB Mem : 65969880 total, 36228116 free, 17424420 used, 12317344 buff/cache&lt;br/&gt;
KiB Swap: &#160;1046524 total, &#160;1046524 free, &#160; &#160; &#160; &#160;0 used. 40724100 avail Mem&lt;/p&gt;

&lt;p&gt;&#160; PID USER &#160; &#160; &#160;PR &#160;NI &#160; &#160;VIRT &#160; &#160;RES &#160; &#160;SHR S &#160;%CPU %MEM &#160; &#160; TIME+ COMMAND&lt;br/&gt;
27401 root &#160; &#160; &#160; 5 -15 6968176 3.660g &#160;40296 S 163.9 &#160;5.8 &#160;40:47.06 mongod&lt;br/&gt;
&#160;3570 root &#160; &#160; &#160;20 &#160; 0 &#160; &#160;4792 &#160; 1656 &#160; 1372 R &#160;56.2 &#160;0.0 &#160; 0:30.52 gzip&lt;br/&gt;
13792 root &#160; &#160; &#160; 5 -15 5612844 3.473g &#160;39568 S &#160;16.0 &#160;5.5 &#160;11:36.25 mongod&lt;br/&gt;
13775 root &#160; &#160; &#160; 5 -15 5629592 3.483g &#160;39692 S &#160;15.7 &#160;5.5 &#160;11:12.36 mongod&lt;br/&gt;
13800 root &#160; &#160; &#160; 5 -15 5648408 3.508g &#160;39656 S &#160;15.2 &#160;5.6 &#160;11:07.38 mongod&lt;br/&gt;
&#160;1369 root &#160; &#160; &#160;20 &#160; 0 2909596 101380 &#160;52392 S &#160;11.6 &#160;0.2 &#160; 8:24.29 dockerd&lt;br/&gt;
13284 root &#160; &#160; &#160;20 &#160; 0 &#160;183120 &#160;59008 &#160;41724 S &#160; 2.5 &#160;0.1 &#160; 1:11.53 consul&lt;br/&gt;
13802 root &#160; &#160; &#160; 5 -15 1976632 138672 &#160;38204 S &#160; 2.5 &#160;0.2 &#160; 1:17.95 mongod&lt;br/&gt;
13779 root &#160; &#160; &#160; 5 -15 1698528 108160 &#160;37556 S &#160; 1.9 &#160;0.2 &#160; 0:50.10 mongod&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;supported logs5&lt;/b&gt;: name: &lt;b&gt;jornalctlissue1.logs&lt;/b&gt;&lt;/li&gt;
	&lt;li&gt;&lt;b&gt;supported logs6&lt;/b&gt;: kernel logs name: &lt;b&gt;dmesg.txt&lt;/b&gt;&lt;/li&gt;
	&lt;li&gt;&lt;b&gt;supported logs7&lt;/b&gt;: &lt;b&gt;diagnostic.data&lt;/b&gt;&lt;b&gt;{&lt;/b&gt;}&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Occurrence 2:&lt;/b&gt; The same observation was made: the server did not respond for the 16:00:36, 16:00:37, and 16:00:38 timestamps&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;logs1:&lt;/b&gt; name: &lt;b&gt;mongo-27045.log.issue2.gz&lt;/b&gt; : 16:00:39 time stamp came before 16:00:38 and 16:00:36 multiple times:&lt;br/&gt;
2022-08-03T&lt;b&gt;16:00:36&lt;/b&gt;.128+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn2189&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key: 
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_1.imsiApnbindings&quot; }&lt;br/&gt;
2022-08-03T16:00:36.128+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn2189&amp;#93;&lt;/span&gt; Only one plan is available; it will be run but will not be cached. ns: imsiApnbindings_1.imsiApnbindings query: &lt;/p&gt;
&lt;div class=&quot;error&quot;&gt;&lt;span class=&quot;error&quot;&gt;Unknown macro: { _id}&lt;/span&gt; &lt;/div&gt;
&lt;p&gt; } sort: {} projection: {}, planSummary: IXSCAN { _id: 1 }&lt;br/&gt;
2022-08-03T16:00:36.128+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn2557&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key:&lt;/p&gt;
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_1.imsiApnbindings&quot; }&lt;br/&gt;
2022-08-03T&lt;b&gt;16:00:39&lt;/b&gt;.091+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn2557&amp;#93;&lt;/span&gt; Only one plan is available; it will be run but will not be cached. ns: imsiApnbindings_1.imsiApnbindings query: &lt;/p&gt;
&lt;div class=&quot;error&quot;&gt;&lt;span class=&quot;error&quot;&gt;Unknown macro: { _id}&lt;/span&gt; &lt;/div&gt;
&lt;p&gt; } sort: {} projection: {}, planSummary: IXSCAN { _id: 1 }&lt;br/&gt;
2022-08-03T&lt;b&gt;16:00:36&lt;/b&gt;.128+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn2209&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key:&lt;/p&gt;
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_2.imsiApnbindings&quot; }&lt;br/&gt;
2022-08-03T16:00:39.091+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn2209&amp;#93;&lt;/span&gt; Only one plan is available; it will be run but will not be cached. ns: imsiApnbindings_2.imsiApnbindings query: &lt;/p&gt;
&lt;div class=&quot;error&quot;&gt;&lt;span class=&quot;error&quot;&gt;Unknown macro: { _id}&lt;/span&gt; &lt;/div&gt;
&lt;p&gt; } sort: {} projection: {}, planSummary: IXSCAN { _id: 1 }&lt;br/&gt;
2022-08-03T16:00:36.129+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn2212&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key:&lt;/p&gt;
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_1.imsiApnbindings&quot; }&lt;br/&gt;
2022-08-03T16:00:36.130+0000 D2 QUERY &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn2462&amp;#93;&lt;/span&gt; Relevant index 0 is kp: { &lt;em&gt;id: 1 } unique name: &apos;(_id&lt;/em&gt;, )&apos; io: { v: 2, key:&lt;/p&gt;
{ _id: 1 }
&lt;p&gt;, name: &quot;&lt;em&gt;id&lt;/em&gt;&quot;, ns: &quot;imsiApnbindings_1.imsiApnbindings&quot; }&lt;/p&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;logs2&lt;/b&gt;: 16:00:36,16:00:37,16:00:38 timestamps are missed in &lt;b&gt;mongostat27045.txt&lt;/b&gt;&lt;br/&gt;
insert query update delete getmore command dirty &#160;used flushes vsize &#160; res &#160; qrw &#160; arw net_in net_out conn &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; set repl &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;time&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 5 &#160; &#160;705 &#160; &#160;652 &#160; &#160; &#160;27 &#160; 168|0 &#160;2.4% 55.6% &#160; &#160; &#160; 0 6.42G 3.72G &#160; 0|0 &#160; 1|1 &#160; 797k &#160; 2.07m &#160;762 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:29.415&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 4 &#160; &#160;731 &#160; &#160;687 &#160; &#160; &#160;26 &#160; 158|0 &#160;2.4% 55.6% &#160; &#160; &#160; 0 6.42G 3.72G &#160; 0|0 &#160; 1|0 &#160; 825k &#160; 2.05m &#160;762 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:30.415&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 7 &#160; &#160;747 &#160; &#160;765 &#160; &#160; &#160;26 &#160; 142|0 &#160;2.5% 55.6% &#160; &#160; &#160; 0 6.42G 3.72G &#160; 0|1 &#160; 1|0 &#160; 862k &#160; 2.27m &#160;762 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:31.417&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 4 &#160; &#160;730 &#160; &#160;708 &#160; &#160; &#160;27 &#160; 145|0 &#160;2.5% 55.6% &#160; &#160; &#160; 0 6.42G 3.72G &#160; 0|0 &#160; 1|0 &#160; 829k &#160; 2.16m &#160;762 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:32.414&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 2 &#160; &#160;719 &#160; &#160;623 &#160; &#160; &#160;26 &#160; 146|0 &#160;2.5% 55.7% &#160; &#160; &#160; 0 6.42G 3.72G &#160; 0|3 &#160; 1|0 &#160; 790k &#160; 1.93m &#160;762 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:33.422&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 3 &#160; &#160;713 &#160; &#160;729 &#160; &#160; &#160;27 &#160; 212|0 &#160;2.5% 55.7% &#160; &#160; &#160; 0 6.42G 3.72G &#160; 0|0 &#160; 1|0 &#160; 831k &#160; 2.27m &#160;762 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:34.416&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 4 &#160; &#160;761 &#160; &#160;703 &#160; &#160; &#160;25 &#160; 168|0 &#160;2.6% 55.7% &#160; &#160; &#160; 0 6.42G 3.72G &#160; 0|1 &#160; 1|0 &#160; 849k &#160; 2.10m &#160;762 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:35.414&lt;br/&gt;
&#160; &#160; *0 &#160; &#160; 1 &#160; &#160;196 &#160; &#160;158 &#160; &#160; &#160; 6 &#160; &#160;37|0 &#160;2.6% 55.7% &#160; &#160; &#160; 0 6.42G 3.72G 0|196 4|128 &#160; 211k &#160; &#160;422k &#160;762 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:39.121&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;28 &#160; 1594 &#160; &#160;716 &#160; &#160; &#160; 0 &#160; 358|0 &#160;2.6% 55.7% &#160; &#160; &#160; 0 6.36G 3.76G 0|118 1|128 &#160;1.51m &#160; 2.76m &#160;570 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:39.227&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;*0 &#160; &#160; 58 &#160; &#160; 21 &#160; &#160; &#160;31 &#160;1610|0 &#160;2.6% 55.7% &#160; &#160; &#160; 0 5.85G 3.79G &#160; 0|0 &#160; 1|0 &#160; 393k &#160; 6.58m &#160;210 rs-app_shard-imsi-1 &#160;PRI Aug &#160;3 16:00:39.416&lt;/li&gt;
	&lt;li&gt;&lt;b&gt;logs3:&lt;/b&gt; 16:00:36,16:00:37,16:00:38 timestamps are missed in &lt;b&gt;mongotop27045.txt&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;ns &#160; &#160;total &#160; &#160;read &#160; &#160;write &#160; &#160;2022-08-03T16:00:35Z&lt;br/&gt;
imsiApnbindings_1.imsiApnbindings &#160; &#160;379ms &#160; &#160; 0ms &#160; &#160;379ms&lt;br/&gt;
imsiApnbindings_2.imsiApnbindings &#160; &#160;376ms &#160; &#160; 0ms &#160; &#160;376ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;local.oplog.rs &#160; &#160; 13ms &#160; &#160;13ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; admin.system.keys &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.roles &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.users &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.version &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160;config.system.sessions &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; config.transactions &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; imsiApnbindings.imsiApnbindings &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;ns &#160; &#160;total &#160; &#160;read &#160; &#160;write &#160; &#160;2022-08-03T16:00:39Z&lt;br/&gt;
imsiApnbindings_2.imsiApnbindings &#160; &#160; 77ms &#160; &#160; 0ms &#160; &#160; 77ms&lt;br/&gt;
imsiApnbindings_1.imsiApnbindings &#160; &#160; 53ms &#160; &#160; 0ms &#160; &#160; 53ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;local.oplog.rs &#160; &#160; &#160;3ms &#160; &#160; 3ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; admin.system.keys &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.roles &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.users &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160;admin.system.version &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160;config.system.sessions &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; config.transactions &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;br/&gt;
&#160; imsiApnbindings.imsiApnbindings &#160; &#160; &#160;0ms &#160; &#160; 0ms &#160; &#160; &#160;0ms&lt;/p&gt;

&lt;p&gt;&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;ns &#160; &#160; &#160; total &#160; &#160; &#160; read &#160; &#160; &#160; write &#160; &#160;2022-08-03T16:00:40Z&lt;br/&gt;
imsiApnbindings_2.imsiApnbindings &#160; &#160;805523ms &#160; &#160; 5389ms &#160; &#160;800133ms&lt;br/&gt;
imsiApnbindings_1.imsiApnbindings &#160; &#160;773690ms &#160; &#160;10655ms &#160; &#160;763034ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;local.oplog.rs &#160; &#160; &#160; &#160;16ms &#160; &#160; &#160; 16ms &#160; &#160; &#160; &#160; 0ms&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; admin.system.keys &#160; &#160; &#160; &#160; 0ms &#160; &#160; &#160; &#160;0ms &#160; &#160; &#160; &#160; 0ms&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;logs4: top_vm_output.log&lt;/b&gt;&lt;br/&gt;
top - 16:00:36 up &#160;2:11, &#160;3 users, &#160;load average: 10.28, 8.53, 8.06&lt;br/&gt;
Tasks: 393 total, &#160; 1 running, 294 sleeping, &#160; 3 stopped, &#160; 0 zombie&lt;br/&gt;
%Cpu(s): 35.0 us, &#160;7.8 sy, &#160;0.0 ni, 45.7 id, &#160;8.4 wa, &#160;0.0 hi, &#160;3.1 si, &#160;0.0 st&lt;br/&gt;
KiB Mem : 65969880 total, 26656104 free, 19064012 used, 20249764 buff/cache&lt;br/&gt;
KiB Swap: &#160;1046524 total, &#160;1046524 free, &#160; &#160; &#160; &#160;0 used. 37792588 avail Mem&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160; PID USER &#160; &#160; &#160;PR &#160;NI &#160; &#160;VIRT &#160; &#160;RES &#160; &#160;SHR S &#160;%CPU %MEM &#160; &#160; TIME+ COMMAND&lt;br/&gt;
27401 root &#160; &#160; &#160; 5 -15 6728284 3.724g &#160;40360 S &#160;74.2 &#160;5.9 108:10.39 mongod&lt;br/&gt;
&#160;4489 root &#160; &#160; &#160;20 &#160; 0 &#160; &#160;4792 &#160; 1572 &#160; 1288 D &#160;70.5 &#160;0.0 &#160; 0:28.74 gzip&lt;br/&gt;
13792 root &#160; &#160; &#160; 5 -15 6111488 3.921g &#160;39568 S &#160;59.6 &#160;6.2 &#160;29:27.32 mongod&lt;br/&gt;
13800 root &#160; &#160; &#160; 5 -15 6077460 3.894g &#160;39656 S &#160;14.2 &#160;6.2 &#160;28:30.81 mongod&lt;br/&gt;
13775 root &#160; &#160; &#160; 5 -15 6128068 3.936g &#160;39696 S &#160;13.9 &#160;6.3 &#160;29:23.52 mongod&lt;br/&gt;
&#160;1369 root &#160; &#160; &#160;20 &#160; 0 2909852 101944 &#160;52524 S &#160; 8.3 &#160;0.2 &#160;20:01.65 dockerd&lt;br/&gt;
13802 root &#160; &#160; &#160; 5 -15 1991472 153300 &#160;39548 S &#160; 2.3 &#160;0.2 &#160; 3:21.02 mongod&lt;br/&gt;
&#160;8176 root &#160; &#160; &#160;20 &#160; 0 &#160;687072 &#160;16964 &#160; 9080 S &#160; 2.0 &#160;0.0 &#160; 1:46.28 node_exporter&lt;br/&gt;
13284 root &#160; &#160; &#160;20 &#160; 0 &#160;183120 &#160;59240 &#160;41792 S &#160; 1.7 &#160;0.1 &#160; 3:05.12 consul&lt;br/&gt;
&#8211;&lt;br/&gt;
top - 16:00:40 up &#160;2:11, &#160;3 users, &#160;load average: 10.28, 8.53, 8.06&lt;br/&gt;
Tasks: 395 total, &#160; 3 running, 293 sleeping, &#160; 3 stopped, &#160; 0 zombie&lt;br/&gt;
%Cpu(s): 31.9 us, &#160;6.8 sy, &#160;0.0 ni, 35.4 id, 23.2 wa, &#160;0.0 hi, &#160;2.8 si, &#160;0.0 st&lt;br/&gt;
KiB Mem : 65969880 total, 26688584 free, 19027816 used, 20253480 buff/cache&lt;br/&gt;
KiB Swap: &#160;1046524 total, &#160;1046524 free, &#160; &#160; &#160; &#160;0 used. 37828984 avail Mem&lt;/p&gt;

&lt;p&gt;&#160; PID USER &#160; &#160; &#160;PR &#160;NI &#160; &#160;VIRT &#160; &#160;RES &#160; &#160;SHR S &#160;%CPU %MEM &#160; &#160; TIME+ COMMAND&lt;br/&gt;
13792 root &#160; &#160; &#160; 5 -15 6111488 3.921g &#160;39568 S 104.4 &#160;6.2 &#160;29:30.86 mongod&lt;br/&gt;
27401 root &#160; &#160; &#160; 5 -15 6199976 3.789g &#160;40360 S &#160;33.0 &#160;6.0 108:11.51 mongod&lt;br/&gt;
13775 root &#160; &#160; &#160; 5 -15 6128068 3.936g &#160;39696 S &#160;22.7 &#160;6.3 &#160;29:24.29 mongod&lt;br/&gt;
&#160;4489 root &#160; &#160; &#160;20 &#160; 0 &#160; &#160;4792 &#160; 1572 &#160; 1288 R &#160;20.9 &#160;0.0 &#160; 0:29.45 gzip&lt;br/&gt;
13800 root &#160; &#160; &#160; 5 -15 6077460 3.894g &#160;39656 S &#160;19.2 &#160;6.2 &#160;28:31.46 mongod&lt;br/&gt;
&#160;1369 root &#160; &#160; &#160;20 &#160; 0 2909852 102104 &#160;52524 S &#160; 6.8 &#160;0.2 &#160;20:01.88 dockerd&lt;br/&gt;
13284 root &#160; &#160; &#160;20 &#160; 0 &#160;183120 &#160;59240 &#160;41792 S &#160; 2.7 &#160;0.1 &#160; 3:05.21 consul&lt;br/&gt;
13802 root &#160; &#160; &#160; 5 -15 1991472 153300 &#160;39548 S &#160; 2.7 &#160;0.2 &#160; 3:21.11 mongod&lt;br/&gt;
13779 root &#160; &#160; &#160; 5 -15 1698528 108160 &#160;37556 S &#160; 1.8 &#160;0.2 &#160; 2:10.46 mongod&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;logs5: jornalctlissue2.logs&lt;/b&gt;&lt;/li&gt;
	&lt;li&gt;&lt;b&gt;logs6: thread.txt&lt;/b&gt;&lt;br/&gt;
2022-08-03 16:00:34 -&amp;gt; 816&lt;br/&gt;
1624&lt;br/&gt;
2022-08-03 16:00:36 -&amp;gt; 816&lt;br/&gt;
1624&lt;br/&gt;
2022-08-03 16:00:37 -&amp;gt; 816&lt;br/&gt;
1619&lt;br/&gt;
2022-08-03 16:00:40 -&amp;gt; 366&lt;br/&gt;
724&lt;br/&gt;
2022-08-03 16:00:41 -&amp;gt; 1207&lt;/li&gt;
	&lt;li&gt;&lt;b&gt;logs7 and 8:&lt;/b&gt; &lt;b&gt;dmesg.txt,diagnostic.data&lt;/b&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;NOTE:&lt;/b&gt; the logs from both occurrences are combined as &lt;b&gt;combinedmongologs.tar.gz&lt;/b&gt; and will be uploaded to the uploader portal.&lt;/p&gt;

&lt;p&gt;Kindly provide the &lt;b&gt;uploader portal link&lt;/b&gt; as it is showing as expired for me.&lt;/p&gt;

&lt;p&gt;Thanks,&lt;br/&gt;
Kapil&lt;/p&gt;</comment>
                            <comment id="4713660" author="JIRAUSER1264730" created="Fri, 29 Jul 2022 05:55:02 +0000"  >&lt;p&gt;Hi Christopher,&lt;/p&gt;

&lt;p&gt;We tried to run the gdb command several times, but whenever we run it, all connections on the primary disconnect. Please find some mongo log lines below as a reference from when we ran the gdb command:&lt;/p&gt;

&lt;p&gt;2022-07-27T10:33:30.814+0000 I CONNPOOL &lt;span class=&quot;error&quot;&gt;&amp;#91;RS&amp;#93;&lt;/span&gt; Dropping all pooled connections to &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:bb&amp;#93;&lt;/span&gt;:27023 due to NetworkInterfaceExceededTimeLimit: Connection pool has been idle for longer than the host timeout&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;2022-07-27T10:35:11.262+0000 I REPL &#160; &#160; &lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; Member &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:11&amp;#93;&lt;/span&gt;:27023 is now in state RS_DOWN&lt;br/&gt;
2022-07-27T10:35:11.262+0000 I REPL &#160; &#160; &lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; Member &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:9&amp;#93;&lt;/span&gt;:27023 is now in state RS_DOWN&lt;br/&gt;
2022-07-27T10:35:11.262+0000 I REPL &#160; &#160; &lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; Member &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:57&amp;#93;&lt;/span&gt;:27023 is now in state RS_DOWN&lt;br/&gt;
2022-07-27T10:35:11.262+0000 I REPL &#160; &#160; &lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; Member &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:ab&amp;#93;&lt;/span&gt;:27023 is now in state RS_DOWN&lt;br/&gt;
2022-07-27T10:35:11.262+0000 I REPL &#160; &#160; &lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; can&apos;t see a majority of the set, relinquishing primary&lt;br/&gt;
2022-07-27T10:35:11.262+0000 I REPL &#160; &#160; &lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; Stepping down from primary in response to heartbeat&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;2022-07-27T10:35:11.274+0000 I REPL &#160; &#160; &lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; transition to SECONDARY from PRIMARY&lt;br/&gt;
2022-07-27T10:35:11.277+0000 I NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; Skip closing connection for connection # 1691&lt;br/&gt;
2022-07-27T10:35:11.277+0000 I NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; Skip closing connection for connection # 1690&lt;br/&gt;
2022-07-27T10:35:11.277+0000 I NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;replexec-14&amp;#93;&lt;/span&gt; Skip closing connection for connection # 1689&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;2022-07-27T10:35:11.288+0000 I NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn1431&amp;#93;&lt;/span&gt; Error sending response to client: SocketException: Broken pipe. Ending connection from &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:bd&amp;#93;&lt;/span&gt;:43456 (connection id: 1431)&lt;br/&gt;
2022-07-27T10:35:11.288+0000 I NETWORK &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn1602&amp;#93;&lt;/span&gt; Error sending response to client: SocketException: Broken pipe. Ending connection from &lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:c2&amp;#93;&lt;/span&gt;:51016 (connection id: 1602)&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;So we are unable to collect the gdb output because of the above issue; could you please suggest an alternative.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4681607" author="JIRAUSER1264730" created="Fri, 15 Jul 2022 11:35:58 +0000"  >&lt;p&gt;Hi Christopher,&lt;/p&gt;

&lt;p&gt;I am still working on the gdb output; in the meantime we made the observation given below: whenever we see a high query response, the &lt;b&gt;arw&lt;/b&gt; value in mongostat increases (usually its value is 1 or 2), which increases load and CPU on the mongo member (top output, mongostat, and a log message are given below for reference)&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Instance&lt;/b&gt; : 14:01:23&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;Mongo log message:&lt;/b&gt; /data/mongod-node/db/mongo-27039.log:2022-07-12T14:01:23.462+0000 I&#160; COMMAND&#160; &lt;span class=&quot;error&quot;&gt;&amp;#91;conn2748&amp;#93;&lt;/span&gt; command ipv6bindings_1.ipv6bindings command: find { find: &quot;ipv6bindings&quot;, filter: 
{ _id: &quot;3201:0000:0000:1452&quot; }
&lt;p&gt;, limit: 1, singleBatch: true, $db: &quot;ipv6bindings_1&quot;, $clusterTime:&lt;br/&gt;
Unknown macro: { clusterTime}&lt;br/&gt;
} planSummary: IDHACK keysExamined:1 docsExamined:1 cursorExhausted:1 numYields:1 nreturned:1 reslen:514 locks:&lt;br/&gt;
{ ReplicationStateTransition: Unknown macro: &lt;/p&gt;
{ acquireCount}
&lt;p&gt;, Global: { acquireCount:&lt;/p&gt;
{ r: 2 }
&lt;p&gt;}, Database: &lt;/p&gt;
&lt;div class=&quot;error&quot;&gt;&lt;span class=&quot;error&quot;&gt;Unknown macro: { acquireCount}&lt;/span&gt; &lt;/div&gt;
&lt;p&gt;storage:{} protocol:op_msg &lt;b&gt;818ms&lt;/b&gt;&lt;/p&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;Top output:&lt;/b&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;top - 14:01:23 up 80 days, 22:06,&#160; 1 user,&#160; load average: &lt;b&gt;29.54&lt;/b&gt;, 15.91, 12.82&lt;/p&gt;

&lt;p&gt;Tasks: 378 total,&#160;&#160; 2 running, 278 sleeping,&#160;&#160; 0 stopped,&#160;&#160; 0 zombie&lt;/p&gt;

&lt;p&gt;%Cpu(s): 53.9 us,&#160; 8.1 sy,&#160; 0.0 ni, 30.5 id,&#160; 0.0 wa,&#160; 0.0 hi,&#160; 7.4 si,&#160; 0.0 st&lt;/p&gt;

&lt;p&gt;KiB Mem : 65969880 total, 30794112 free, 22055376 used, 13120392 buff/cache&lt;/p&gt;

&lt;p&gt;KiB Swap:&#160; 1046524 total,&#160;&#160; 992300 free,&#160;&#160;&#160; 54224 used. 33317032 avail Mem&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&#160; PID USER&#160;&#160;&#160;&#160;&#160; PR&#160; NI&#160;&#160;&#160; VIRT&#160;&#160;&#160; RES&#160;&#160;&#160; SHR S&#160; %CPU %MEM&#160;&#160;&#160;&#160; TIME+ COMMAND&lt;/p&gt;

&lt;p&gt;8540 root&#160;&#160;&#160;&#160;&#160;&#160; 5 -15 6569936 3.722g&#160; 42096 S &lt;b&gt;182.5&lt;/b&gt;&#160; 5.9&#160;&#160; 6946:53 mongod&lt;/p&gt;

&lt;p&gt;8536 root&#160;&#160;&#160;&#160;&#160;&#160; 5 -15 6585680 3.783g&#160; 41836 S 161.9&#160; 6.0&#160;&#160; 6973:49 mongod&lt;/p&gt;

&lt;p&gt;8538 root&#160;&#160;&#160;&#160;&#160;&#160; 5 -15 6405456 3.947g&#160; 41620 S&#160; 49.7&#160; 6.3&#160;&#160; 5158:44 mongod&lt;/p&gt;

&lt;p&gt;8537 root&#160;&#160;&#160;&#160;&#160;&#160; 5 -15 6526672 3.983g&#160; 42000 S&#160; 45.7&#160; 6.3&#160;&#160; 5151:51 mongod&lt;/p&gt;

&lt;p&gt;18232 root&#160;&#160;&#160;&#160;&#160; 20&#160;&#160; 0&#160; 963048 146448&#160;&#160; 9436 S&#160; 16.9&#160; 0.2&#160; 14147:33 weaver&lt;/p&gt;

&lt;p&gt;25927 root&#160;&#160;&#160;&#160;&#160; 20&#160;&#160; 0&#160; 217616 115828&#160; 62588 S&#160; 14.9&#160; 0.2&#160;&#160; 1974:22 consul&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;Mongostat(for 27039 port):&lt;/b&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160; &#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;host insert query update delete getmore command dirty&#160; used &#160;&#160;&#160;flushes vsize&#160;&#160; res&#160; qrw&#160;&#160;&#160; arw net_in net_out conn&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160; set repl&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160; time&lt;/p&gt;

&lt;p&gt;&#160;**&#160;&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:e&amp;#93;&lt;/span&gt;:27039&#160;&#160;&#160;&#160; *0&#160;&#160; 448&#160;&#160;&#160; 954&#160;&#160;&#160; 299&#160;&#160;&#160;&#160; 376&#160;&#160; 453|0&#160; 1.5% 76.4%&#160;&#160;&#160;&#160;&#160;&#160; 0 6.26G 3.72G&#160; 0|0&#160;&#160;&#160; 1|0&#160; 1.26m&#160;&#160; 1.63m&#160; 317 rs-sess-ipv6-shard-20&#160; PRI Jul 12 14:01:22.424&lt;/p&gt;

&lt;p&gt;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160; &#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;localhost:27039&#160;&#160;&#160;&#160; &lt;b&gt;0&#160;&#160; 154&#160;&#160;&#160; 311&#160;&#160;&#160;&#160; 53&#160;&#160;&#160;&#160;&#160; 68&#160;&#160; 132|0&#160; 1.4% 76.3%&#160;&#160;&#160;&#160;&#160;&#160; 0 6.26G 3.72G &#160;&#160;&#160;*0|52 83|128&lt;/b&gt;&#160;&#160; 307k&#160;&#160;&#160; 393k&#160; 317 rs-sess-ipv6-shard-20&#160; PRI Jul 12 14:01:23.374&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:e&amp;#93;&lt;/span&gt;:27039&#160;&#160;&#160;&#160; &lt;b&gt;0&#160;&#160; 141&#160;&#160;&#160; 277&#160;&#160;&#160;&#160; 37&#160;&#160;&#160;&#160;&#160; 41&#160;&#160;&#160; 98|0&#160; 1.4% 76.3%&#160;&#160;&#160;&#160;&#160;&#160; 0 6.26G 3.72G &#160;&#160;&#160;&#160;&#160;*24|77&#160; 11|39&lt;/b&gt;&#160;&#160; 253k&#160;&#160;&#160; 338k&#160; 317 rs-sess-ipv6-shard-20&#160; PRI Jul 12 14:01:23.529&lt;/p&gt;

&lt;p&gt;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160; &#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;localhost:27039&#160;&#160;&#160;&#160; *0&#160;&#160; 596&#160;&#160; 1525&#160;&#160;&#160; 440&#160;&#160;&#160;&#160; 305&#160;&#160; 391|0&#160; 1.5% 76.4%&#160;&#160;&#160;&#160;&#160;&#160; 0 6.26G 3.72G&#160;&#160; 0|0&#160;&#160;&#160; 1|1&#160; 1.55m&#160;&#160; 2.18m&#160; 317 rs-sess-ipv6-shard-20&#160; PRI Jul 12 14:01:24.374&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;error&quot;&gt;&amp;#91;2606:ae00:3001:8311:172:16:244:e&amp;#93;&lt;/span&gt;:27039&#160;&#160;&#160;&#160; *0&#160;&#160; 669&#160;&#160; 1703&#160;&#160;&#160; 509&#160;&#160;&#160;&#160; 363&#160;&#160; 460|0&#160; 1.5% 76.4%&#160;&#160;&#160;&#160;&#160;&#160; 0 6.26G 3.72G&#160;&#160; 0|1&#160;&#160;&#160; 1|0&#160; 1.76m&#160;&#160; 2.47m&#160; 317 rs-sess-ipv6-shard-20&#160; PRI Jul 12 14:01:24.422&lt;/p&gt;

&lt;p&gt;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160; &#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;localhost:27039&#160;&#160;&#160;&#160; *0&#160;&#160; 574&#160;&#160;&#160; 915&#160;&#160;&#160; 302&#160;&#160;&#160;&#160; 355&#160;&#160; 430|0&#160; 1.5% 76.4%&#160;&#160;&#160;&#160;&#160;&#160; 0 6.26G 3.72G&#160;&#160; 0|0&#160;&#160;&#160; 1|1&#160; 1.24m&#160;&#160; 1.65m&#160; 317 rs-sess-ipv6-shard-20&#160; PRI Jul 12 14:01:25.375&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Information about our architecture:&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;Sharding Db details:&lt;/b&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;Here we use the sharding db as a metadata db that contains bucket and shard information. The application connects to the sharding db, gets information about the shards, and, based on bucket ids, stores data in a uniform manner across all the shards.&lt;/p&gt;

&lt;p&gt;&lt;b&gt;sharding db member details(running inside container):&lt;/b&gt;&lt;br/&gt;
mongod --keyFile=/mongodb.key --ipv6 --bind_ip_all --replSet imsi-msisdn-sharddb --port 27019 --dbpath=/data/db/sharddb-27019 --logpath /data/db/mongo-27019.log --logappend --logRotate reopen&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Actual shards information:&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;We have 2 sites&lt;span class=&quot;error&quot;&gt;&amp;#91;each sites have 7 VMs(each VM has 4 mongod data member and 3 arbiters running inside the container)&amp;#93;&lt;/span&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Each VM RAM:&lt;/b&gt; 64 GB, each mongod data member has 7.6GB tmpfs as storage and 4.4 Gb as wiredTiger cache&lt;/p&gt;

&lt;p&gt;&lt;b&gt;ps -efww of one of the container:&lt;/b&gt;&lt;span class=&quot;error&quot;&gt;&amp;#91;each container has 1 primary and 3 secondary&amp;#39;s and all secondaries and all members are from different replica sets&amp;#93;&lt;/span&gt;&lt;/p&gt;

&lt;p&gt;root &#160; &#160; &#160; 993 &#160; &#160; 1 &#160;1 Jul01 ? &#160; &#160; &#160; &#160;04:46:57 mongod --keyFile=/mongodb.key --enableMajorityReadConcern false --ipv6 --bind_ip_all --port 27021 --dbpath=/data/db/wt-27021 --replSet rs-shard-2 --quiet --slowms 500 --logpath /data/db/mongo-27021.log --setParameter diagnosticDataCollectionEnabled=false --logappend --oplogSize 100 --logRotate reopen --wiredTigerCacheSizeGB &lt;b&gt;.26 ----arbiter&lt;/b&gt;&lt;br/&gt;
root &#160; &#160; &#160;1034 &#160; &#160; 1 &#160;1 Jul01 ? &#160; &#160; &#160; &#160;04:37:54 mongod --keyFile=/mongodb.key --enableMajorityReadConcern false --ipv6 --bind_ip_all --port 27022 --dbpath=/data/db/wt-27022 --replSet rs-shard-3 --quiet --slowms 500 --logpath /data/db/mongo-27022.log --setParameter diagnosticDataCollectionEnabled=false --logappend --oplogSize 100 --logRotate reopen --wiredTigerCacheSizeGB &lt;b&gt;.26 ----arbiter&lt;/b&gt;&lt;br/&gt;
root &#160; &#160; &#160;1227 &#160; &#160; 1 &#160;1 Jul01 ? &#160; &#160; &#160; &#160;04:49:42 mongod --keyFile=/mongodb.key --enableMajorityReadConcern false --ipv6 --bind_ip_all --port 27025 --dbpath=/data/db/wt-27025 --replSet rs-shard-6 --quiet --slowms 500 --logpath /data/db/mongo-27025.log --setParameter diagnosticDataCollectionEnabled=false --logappend --oplogSize 100 --logRotate reopen --wiredTigerCacheSizeGB .&lt;b&gt;26 ----arbiter&lt;/b&gt;&lt;br/&gt;
root &#160; &#160; &#160;1265 &#160; &#160; 1 30 Jul01 ? &#160; &#160; &#160; &#160;4-04:14:17 mongod --keyFile=/mongodb.key --enableMajorityReadConcern false --ipv6 --bind_ip_all --port 27028 --dbpath=/data/db/wt-27028 --replSet rs-shard-ipv6-1 --quiet --slowms 500 --logpath /data/db/mongo-27028.log --setParameter diagnosticDataCollectionEnabled=true --logappend --oplogSize 3221 --logRotate reopen --wiredTigerCacheSizeGB &lt;b&gt;4.40 ----datamember&lt;/b&gt;&lt;br/&gt;
root &#160; &#160; &#160;1601 &#160; &#160; 1 41 Jul01 ? &#160; &#160; &#160; &#160;5-18:43:19 mongod --keyFile=/mongodb.key --enableMajorityReadConcern false --ipv6 --bind_ip_all --port 27030 --dbpath=/data/db/wt-27030 --replSet rs-shard-ipv6-3 --quiet --slowms 500 --logpath /data/db/mongo-27030.log --setParameter diagnosticDataCollectionEnabled=true --logappend --oplogSize 3221 --logRotate reopen --wiredTigerCacheSizeGB &lt;b&gt;4.40 ----datamember&lt;/b&gt;&lt;br/&gt;
root &#160; &#160; &#160;1734 &#160; &#160; 1 43 Jul01 ? &#160; &#160; &#160; &#160;6-01:01:29 mongod &lt;del&gt;{&lt;/del&gt;}keyFile=/mongodb.key &lt;del&gt;{&lt;/del&gt;}enableMajorityReadConcern false &lt;del&gt;{&lt;/del&gt;}ipv6 -&lt;del&gt;bind_ip_all --port 27031 --dbpath=/data/db/wt-27031 --replSet rs-shard-ipv6-4 --quiet --slowms 500 --logpath /data/db/mongo-27031.log --setParameter diagnosticDataCollectionEnabled=true --logappend --oplogSize 3221 --logRotate reopen --wiredTigerCacheSizeGB &lt;b&gt;4.40&lt;/b&gt;&#160;&lt;b&gt;{&lt;/b&gt;}&lt;b&gt;{&lt;/b&gt;}&lt;/del&gt;&lt;b&gt;---datamember&lt;/b&gt;{&lt;b&gt;}{&lt;tt&gt;}&lt;/tt&gt;&lt;/b&gt;&lt;br/&gt;
root &#160; &#160; 32116 &#160; &#160; 1 27 Jul04 ? &#160; &#160; &#160; &#160;2-22:35:32 mongod &lt;del&gt;{&lt;/del&gt;}keyFile=/mongodb.key &lt;del&gt;{&lt;/del&gt;}enableMajorityReadConcern false &lt;del&gt;{&lt;/del&gt;}ipv6 -&lt;del&gt;bind_ip_all --port 27029 --dbpath=/data/db/wt-27029 --replSet rs-shard-ipv6-2 --quiet --slowms 500 --logpath /data/db/mongo-27029.log --setParameter diagnosticDataCollectionEnabled=true --logappend --oplogSize 3221 --logRotate reopen --wiredTigerCacheSizeGB &lt;b&gt;4.40&lt;/b&gt;&#160;&lt;b&gt;{&lt;/b&gt;}&lt;b&gt;{&lt;/b&gt;}&lt;/del&gt;&lt;b&gt;---datamember&lt;/b&gt;{&lt;b&gt;}{&lt;tt&gt;}&lt;/tt&gt;&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;df -hT inside one of the container:&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;root@mongo-s109:/# df -hT&lt;br/&gt;
Filesystem &#160; &#160; Type &#160; &#160; Size &#160;Used Avail Use% Mounted on&lt;br/&gt;
overlay &#160; &#160; &#160; &#160;overlay &#160; 97G &#160; 24G &#160; 70G &#160;26% /&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160; 64M &#160; &#160; 0 &#160; 64M &#160; 0% /dev&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160; 32G &#160; &#160; 0 &#160; 32G &#160; 0% /sys/fs/cgroup&lt;br/&gt;
shm &#160; &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160; 64M &#160; &#160; 0 &#160; 64M &#160; 0% /dev/shm&lt;br/&gt;
/dev/sda3 &#160; &#160; &#160;ext4 &#160; &#160; &#160;97G &#160; 24G &#160; 70G &#160;26% /w&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160;500M &#160;301M &#160;200M &#160;61% /data/db/wt-27025 &lt;b&gt;----arbiter&lt;/b&gt;&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160;500M &#160;301M &#160;200M &#160;61% /data/db/wt-27022 &lt;b&gt;----arbiter&lt;/b&gt;&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160;7.6G &#160;1.5G &#160;6.2G &#160;20% /data/db/wt-27031 &lt;b&gt;----datamember&lt;/b&gt;&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160;7.6G &#160;1.5G &#160;6.2G &#160;20% /data/db/wt-27029 &lt;b&gt;----datamember&lt;/b&gt;&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160;7.6G &#160;1.5G &#160;6.2G &#160;20% /data/db/wt-27028 &lt;b&gt;----datamember&lt;/b&gt;&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160;7.6G &#160;1.5G &#160;6.2G &#160;20% /data/db/wt-27030 &lt;b&gt;----datamember&lt;/b&gt;&lt;br/&gt;
tmpfs &#160; &#160; &#160; &#160; &#160;tmpfs &#160; &#160;500M &#160;301M &#160;200M &#160;61% /data/db/wt-27021 &lt;b&gt;----arbiter&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;NOTE: We are running site 1 as 4.0.27 and site 2 as 4.2.20, no high query response issue was seen on site 1 , but only site 2 we are seeing it. Out of 4 members some members are from site 1 and some are from site2, and all primaries are equally distributed on both sites.&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Thanks,&lt;br/&gt;
Kapil&lt;/p&gt;</comment>
                            <comment id="4667355" author="JIRAUSER1265262" created="Fri, 8 Jul 2022 21:04:27 +0000"  >&lt;p&gt;Kapil,&lt;/p&gt;

&lt;p&gt;That issue should be fixed by &lt;a href=&quot;https://jira.mongodb.org/browse/SERVER-56054&quot; title=&quot;Change minThreads value for replication writer thread pool to 0&quot; class=&quot;issue-link&quot; data-issue-key=&quot;SERVER-56054&quot;&gt;&lt;del&gt;SERVER-56054&lt;/del&gt;&lt;/a&gt; which affects 4.2.15, below the version you&apos;re running, so I&apos;m not sure it is fully related. I&apos;d also be confused about it resurfacing if you&apos;ve resolved the glibc issue yourself already.&lt;/p&gt;

&lt;p&gt;Just to revisit some items, could you &lt;b&gt;please clarify your topology?&lt;/b&gt; You mentioned you are running a sharded cluster, but are not using a mongos. This isn&apos;t a standard/recommended way of setting up a sharded cluster. Please explain:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;How many shards are in your sharded cluster?&lt;/li&gt;
	&lt;li&gt;If you are not using a mongos, how are you handling routing?&lt;/li&gt;
	&lt;li&gt;You have a lot of memory available on the system running the primary - do you share the tmpfs with this memory pool or with any other services?&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;Looking at what we have available, it looks like this is being used as a replica set only, so I would want to confirm this before making any assumptions.&lt;/p&gt;

&lt;p&gt;In addition, because we are not seeing anything that directly implicates the mongod at the time of latency yet, could you please get us the following information:&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Tar up the information in the following directories and upload it:&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;/var/log/dmesg&lt;/li&gt;
	&lt;li&gt;/var/log/messages&lt;/li&gt;
	&lt;li&gt;/var/log/syslog&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;Re-acquire the logs and FTDC without --quiet (and at the same log level as before)&lt;/b&gt; - it looks like some diagnostic information may have been omitted as a result.&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Get a stack trace at the time of the latency&lt;/b&gt; (this may be difficult, but we&apos;re running out of exact options to see what is truly happening at this time)&lt;/p&gt;
&lt;p/&gt;
&lt;div id=&quot;syntaxplugin&quot; class=&quot;syntaxplugin&quot; style=&quot;border: 1px dashed #bbb; border-radius: 5px !important; overflow: auto; max-height: 30em;&quot;&gt;
&lt;table cellspacing=&quot;0&quot; cellpadding=&quot;0&quot; border=&quot;0&quot; width=&quot;100%&quot; style=&quot;font-size: 1em; line-height: 1.4em !important; font-weight: normal; font-style: normal; color: black;&quot;&gt;
		&lt;tbody &gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;  margin-top: 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;# collect stack trace samples with gdb&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   margin-bottom: 10px;  width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;gdb -p $(pidof mongod) -batch -ex &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&apos;thread apply all bt&apos;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; &amp;gt;gdb.txt&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
			&lt;/tbody&gt;
&lt;/table&gt;
&lt;/div&gt;
&lt;p/&gt;

&lt;p&gt;Christopher&lt;/p&gt;</comment>
                            <comment id="4666468" author="JIRAUSER1264730" created="Fri, 8 Jul 2022 15:44:47 +0000"  >&lt;p&gt;Hi Christopher,&lt;/p&gt;

&lt;p&gt;I have uploaded logs before 00:45:56 also(file name : &lt;b&gt;mongo-27029.log_before45_56.gz)&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;Thanks for your analysis. I have below queries for your recommendations:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;consider the use of an additional drive to separate your data and journaling&lt;b&gt;{&lt;/b&gt;}&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; here we are using tmpfs(RAM) for both data storing and journaling, not using disk.&lt;/b&gt;&#160;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;tune your kernel dirty threshold&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; As we are using same environment for 4.0.27 site as well, and we tried 3 times same repro scenario(power off and on), but we did not get high query response on 4.0.27.&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;As we are using mongod in docker environment, is there any specific tuning we need to do for it?&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;check for other processes that may be using your disk at the time before query latency occurs.&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; We did not find any additional process which is consuming disk, is there any possibility that mongo itself is not responding like the issue we faced in&lt;/b&gt; &#160;&lt;b&gt;&lt;a href=&quot;https://jira.mongodb.org/browse/SERVER-63402&quot; title=&quot;High query response time for find operation in mongo 4.0.27 with mmap storage engine with random intervals (5/7/12/20 hours)&quot; class=&quot;issue-link&quot; data-issue-key=&quot;SERVER-63402&quot;&gt;&lt;del&gt;SERVER-63402&lt;/del&gt;&lt;/a&gt;(though we fixed it by adding the glibc fix)&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4666053" author="JIRAUSER1265262" created="Fri, 8 Jul 2022 13:54:04 +0000"  >&lt;p&gt;Kapil,&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;image-wrap&quot; style=&quot;&quot;&gt;&lt;img src=&quot;https://jira.mongodb.org/secure/attachment/389200/389200_image-2022-07-08-09-23-16-585.png&quot; width=&quot;100%&quot; style=&quot;border: 0px solid black&quot; /&gt;&lt;/span&gt;&lt;/p&gt;

&lt;p&gt;The first thing that seems to be happening in this timeline is that something is using up system memory and maxes out your disk utilization (point A: 2022-07-05T00:45:48.011Z) BEFORE you experience latency/stalling of mongod at point B (2022-07-05T00:45:59.076Z). However, without the logs I can&apos;t discern exactly what is happening with the primary node at that time. The logs would indicate if there was a slow query at this point that precipitated this disk utilization and subsequent stalling.&lt;/p&gt;

&lt;p&gt;However, given that you run the same query filter at different points and with differing performance, it would be strange if the query was directly responsible here. The CPU usage/stalling of the mongod only happens after the disk receives lots of reads and writes at the same time:&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;image-wrap&quot; style=&quot;&quot;&gt;&lt;img src=&quot;https://jira.mongodb.org/secure/attachment/389202/389202_image-2022-07-08-09-33-45-541.png&quot; width=&quot;100%&quot; style=&quot;border: 0px solid black&quot; /&gt;&lt;/span&gt;&lt;/p&gt;

&lt;p&gt;Since the amount of documents scanned doesn&apos;t appear to notably change beyond your normal workload, it&apos;s strange how it would suddenly occur by re-running queries that normally run OK for you at other times.&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;image-wrap&quot; style=&quot;&quot;&gt;&lt;img src=&quot;https://jira.mongodb.org/secure/attachment/389205/389205_image-2022-07-08-09-43-02-073.png&quot; width=&quot;100%&quot; style=&quot;border: 0px solid black&quot; /&gt;&lt;/span&gt;&lt;/p&gt;

&lt;p&gt;One thing you can consider is &lt;b&gt;tuning your kernel dirty threshold&lt;/b&gt; since it appears to correlate with the high disk usage.&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;image-wrap&quot; style=&quot;&quot;&gt;&lt;img src=&quot;https://jira.mongodb.org/secure/attachment/389238/389238_image-2022-07-08-10-24-15-944.png&quot; style=&quot;border: 0px solid black&quot; /&gt;&lt;/span&gt;&lt;/p&gt;

&lt;p&gt;So, your logs begin at 2022-07-05T00:45:56.003+0000 which does not include the time period at which your disk usage begins (point A: 2022-07-05T00:45:48.011Z) so I don&apos;t know what precipitated it exactly. &lt;b&gt;If you could supply the logs including the time covering that period just before, that might help see if a specific query triggered it&lt;/b&gt; (as it should show up as a slow query). I would also keep an eye on whether there may be other processes that start using your disk around when this happens.&#160;&lt;/p&gt;

&lt;p&gt;So, to summarize:&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Possible Remediations/Workarounds:&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;tune your kernel dirty threshold&lt;/li&gt;
	&lt;li&gt;check for other processes that may be using your disk at the time before query latency occurs&lt;/li&gt;
	&lt;li&gt;consider colocating your journal, however this likely won&apos;t affect this behavior since journaling doesn&apos;t appear impacted by the latency.&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;More information we could use:&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;Provide the logs covering the time when hard drive disk usage increases (in this case, it was just a few seconds before your provided logs)&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;Regards,&lt;/p&gt;

&lt;p&gt;Christopher&lt;/p&gt;</comment>
                            <comment id="4656325" author="JIRAUSER1264730" created="Tue, 5 Jul 2022 05:45:39 +0000"  >&lt;p&gt;Please find my inline response as earlier:&lt;/p&gt;

&lt;p&gt;&quot;Just to clarify - you are running the same workload (and the same find queries) on the primary running 4.0, correct? Just double checking because I do not see output in your logs on 4.0 to confirm that the same queries are running.&quot;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; yeah it is running same workload , please find mongostat for reference for 4.0.27 primary member:&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;insert query update delete getmore command dirty &#160;used flushes vsize &#160; res qrw arw net_in net_out conn &#160; &#160; &#160; &#160; &#160; &#160; set repl &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;time&lt;/b&gt;&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;59 &#160; &#160;507 &#160; &#160;217 &#160; &#160; 996 &#160;1207|0 &#160;1.6% 75.6% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.44m &#160; 2.16m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:01.815&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;55 &#160; &#160;534 &#160; &#160;199 &#160; &#160;1071 &#160;1291|0 &#160;1.6% 75.6% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.62m &#160; 2.25m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:02.796&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;49 &#160; &#160;504 &#160; &#160;207 &#160; &#160;1007 &#160;1198|0 &#160;1.6% 75.6% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.42m &#160; 2.09m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:03.796&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;46 &#160; &#160;523 &#160; &#160;172 &#160; &#160; 943 &#160;1157|0 &#160;1.6% 75.6% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.32m &#160; 2.06m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:04.796&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;59 &#160; &#160;547 &#160; &#160;189 &#160; &#160; 971 &#160;1216|0 &#160;1.6% 75.6% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.46m &#160; 2.18m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:05.792&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;56 &#160; &#160;504 &#160; &#160;195 &#160; &#160; 969 &#160;1175|0 &#160;1.7% 75.7% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.37m &#160; 2.05m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:06.819&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;64 &#160; &#160;517 &#160; &#160;215 &#160; &#160;1030 &#160;1255|0 &#160;1.7% 75.7% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.52m &#160; 2.17m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:07.794&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;62 &#160; &#160;505 &#160; &#160;208 &#160; &#160;1030 &#160;1249|0 &#160;1.7% 75.7% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|1 &#160;2.50m &#160; 2.14m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:08.797&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;56 &#160; &#160;512 &#160; &#160;197 &#160; &#160; 921 &#160;1107|0 &#160;1.7% 75.7% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.25m &#160; 2.05m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:09.794&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;51 &#160; &#160;530 &#160; &#160;197 &#160; &#160; 962 &#160;1201|0 &#160;1.7% 75.7% &#160; &#160; &#160; 0 5.17G 3.17G 0|0 1|0 &#160;2.42m &#160; 2.12m &#160;290 rs-shard-ipv6-1 &#160;PRI Jul &#160;5 05:30:10.794&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&quot;4.2 Primary logs with mentioned verbosity &quot;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; I have uploaded the asked logs , please find the details(tar file) given below:&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;cps@system1:~$ tar -tf mongo-27029_PRIMARY_verbose.tar.gz&lt;/b&gt;&lt;br/&gt;
mongo-27029-PRIMARY-verbose.log.gz&lt;br/&gt;
diagnostic.data/&lt;br/&gt;
diagnostic.data/metrics.2022-07-04T23-52-58Z-00000&lt;br/&gt;
diagnostic.data/metrics.interim&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;More details about the recreated issue:&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;Repro steps:&lt;/b&gt;&#160;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160; &#160; &#160; &#160; &#160; &lt;b&gt;VM power off time: 2022-07-04T23:51:06&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&#160; &#160; &#160; &#160; &#160; &lt;b&gt;VM power on time: 2022-07-04T23:52:50&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&#160; &#160; &#160; &#160; &#160; &lt;b&gt;issue time: 2022-07-05T00:46:05&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;&lt;b&gt;mongostat of 4.2.20 Primary&lt;/b&gt;&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;{&lt;/b&gt;}&lt;b&gt;insert query update delete getmore command dirty &#160;used flushes vsize &#160; res qrw arw net_in net_out conn &#160; &#160; &#160; &#160; &#160; &#160; set repl &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160;time&lt;/b&gt;&lt;br/&gt;
&#160; &#160; **&#160; &#160; *0 &#160; &#160;53 &#160; &#160;513 &#160; &#160;199 &#160; &#160; 826 &#160;1011|0 &#160;1.4% 61.1% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|0 &#160;2.00m &#160; 1.99m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:30.956&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;48 &#160; &#160;500 &#160; &#160;196 &#160; &#160; 902 &#160;1075|0 &#160;1.4% 61.1% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|0 &#160;2.09m &#160; 2.05m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:31.955&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;49 &#160; &#160;510 &#160; &#160;200 &#160; &#160; 844 &#160;1027|0 &#160;1.4% 61.1% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|0 &#160;2.04m &#160; 2.04m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:32.950&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;66 &#160; &#160;468 &#160; &#160;233 &#160; &#160; 867 &#160;1077|0 &#160;1.4% 61.1% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|1 &#160;2.10m &#160; 2.03m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:33.961&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;53 &#160; &#160;465 &#160; &#160;209 &#160; &#160; 853 &#160;1023|0 &#160;1.4% 61.2% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|0 &#160;2.00m &#160; 1.99m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:34.950&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;62 &#160; &#160;504 &#160; &#160;207 &#160; &#160; 935 &#160;1109|0 &#160;1.4% 61.2% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|2 &#160;2.17m &#160; 2.16m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:35.951&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;54 &#160; &#160;472 &#160; &#160;198 &#160; &#160; 875 &#160;1060|0 &#160;1.4% 61.2% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|0 &#160;2.08m &#160; 2.00m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:36.950&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;40 &#160; &#160;494 &#160; &#160;219 &#160; &#160; 864 &#160;1038|0 &#160;1.5% 61.2% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|0 &#160;2.04m &#160; 2.05m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:37.949&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;51 &#160; &#160;467 &#160; &#160;204 &#160; &#160; 845 &#160; 984|0 &#160;1.5% 61.2% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|0 &#160;1.95m &#160; 1.94m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:38.950&lt;br/&gt;
&#160; &#160; *0 &#160; &#160;47 &#160; &#160;536 &#160; &#160;207 &#160; &#160; 934 &#160;1116|0 &#160;1.5% 61.2% &#160; &#160; &#160; 0 4.52G 2.41G 0|0 1|0 &#160;2.21m &#160; 2.19m &#160;312 rs-shard-ipv6-2 &#160;PRI Jul &#160;5 01:06:39.950&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;Suspected messages from latest logs(as a example):&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;Slow WT transaction. Lifetime of SnapshotId 19177106 was 6148ms&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4652473" author="JIRAUSER1265262" created="Fri, 1 Jul 2022 11:27:44 +0000"  >&lt;p&gt;Kapil,&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;Just to clarify - you are running the same workload (and the same find queries) on the primary running 4.0, correct? Just double checking because I do not see output in your logs on 4.0 to confirm that the same queries are running.&lt;/li&gt;
	&lt;li&gt;Can you please resubmit the logs for&#160;&lt;b&gt;4.2&lt;/b&gt;&#160;&lt;b&gt;Primary&lt;/b&gt; with the following log level?&#160;There were uasserts near the end of your slow queries that were not visible in your logs that might better describe what was happening. Once you observe the issue again, we can look at that output.
	&lt;ul&gt;
		&lt;li&gt;db.adminCommand (will not persist across restarts), the following sets the default verbosity level to 1, the query to 2, the storage to 2, and the storage.journal to 1:
&lt;p/&gt;
&lt;div id=&quot;syntaxplugin&quot; class=&quot;syntaxplugin&quot; style=&quot;border: 1px dashed #bbb; border-radius: 5px !important; overflow: auto; max-height: 30em;&quot;&gt;
&lt;table cellspacing=&quot;0&quot; cellpadding=&quot;0&quot; border=&quot;0&quot; width=&quot;100%&quot; style=&quot;font-size: 1em; line-height: 1.4em !important; font-weight: normal; font-style: normal; color: black;&quot;&gt;
		&lt;tbody &gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;  margin-top: 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;db.adminCommand( {&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;   setParameter: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;,&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;   logComponentVerbosity: {&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;      verbosity: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;,&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;      query: { verbosity: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;2&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; },&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;      storage: {&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;         verbosity: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;2&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;,&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;         journal: {&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;            verbosity: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;         }&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;      }&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;   }&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   margin-bottom: 10px;  width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;} )&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
			&lt;/tbody&gt;
&lt;/table&gt;
&lt;/div&gt;
&lt;p/&gt;
&lt;p&gt;If you find this to be too much logging, you can try just setting it to loglevel: 1 for all instead. However, the extra information up front would potentially decrease the amount of back-and-forth on this.&lt;/p&gt;&lt;/li&gt;
	&lt;/ul&gt;
	&lt;/li&gt;
&lt;/ul&gt;



&lt;p&gt;So, generally speaking, the FTDC is showing a huge CPU usage spike at the time of your find() queries which affected the entire server. Readers and writers were being queued up until a uassert was output, and then your metrics returned to normal.&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Queued readers/writers on 4.2 Primary:&lt;/b&gt;&lt;/p&gt;
&lt;div class=&apos;table-wrap&apos;&gt;
&lt;table class=&apos;confluenceTable&apos;&gt;&lt;tbody&gt;
&lt;tr&gt;
&lt;td class=&apos;confluenceTd&apos;&gt;2022-06-10T20:03:52.016+0000 I &#160;COMMAND &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn52&amp;#93;&lt;/span&gt; command ipv6bindings_1.ipv6bindings command: find { find: &quot;ipv6bindings&quot;, filter: 
{ _id: &quot;3101:0000:0009:11c1&quot; }
&lt;p&gt;, limit: 1, singleBatch: true, $db: &quot;ipv6bindings_1&quot;, $clusterTime:&lt;br/&gt;
Unknown macro: { clusterTime}&lt;br/&gt;
, $readPreference: { mode: &quot;nearest&quot; } } planSummary: IDHACK keysExamined:1 docsExamined:1 cursorExhausted:1 numYields:1 nreturned:1 reslen:520 locks:{ ReplicationStateTransition:{ acquireCount: &lt;br class=&quot;atl-forced-newline&quot; /&gt;&lt;/p&gt;
{ w: 2 }
&lt;p&gt;}&lt;br/&gt;
, Global: &lt;/p&gt;
&lt;div class=&quot;error&quot;&gt;&lt;span class=&quot;error&quot;&gt;Unknown macro: { acquireCount}&lt;/span&gt; &lt;/div&gt;
&lt;p&gt;storage:{} protocol:op_msg 3441ms&lt;/p&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;&lt;/table&gt;
&lt;/div&gt;


&lt;p&gt;&lt;span class=&quot;image-wrap&quot; style=&quot;&quot;&gt;&lt;img src=&quot;https://jira.mongodb.org/secure/attachment/387777/387777_image-2022-06-30-06-21-23-457.png&quot; width=&quot;100%&quot; style=&quot;border: 0px solid black&quot; /&gt;&lt;/span&gt;&lt;/p&gt;

&lt;p&gt;It doesn&apos;t make sense, however, that your find() query would suddenly stop performing as it usually does (given that you ran other find queries with the same filter that ran &amp;lt;700ms).&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;image-wrap&quot; style=&quot;&quot;&gt;&lt;img src=&quot;https://jira.mongodb.org/secure/attachment/386864/386864_image-2022-06-27-02-50-27-830.png&quot; width=&quot;100%&quot; style=&quot;border: 0px solid black&quot; /&gt;&lt;/span&gt;&lt;/p&gt;

&lt;p&gt;In fact, it seems most work in general is stopping at this point, and it doesn&apos;t appear to be waiting on IO at first glance. The higher log level should help give more context around what is happening at this exact point.&lt;/p&gt;</comment>
                            <comment id="4646914" author="JIRAUSER1264730" created="Wed, 29 Jun 2022 06:35:46 +0000"  >&lt;p&gt;Please find the in line response(as &lt;b&gt;R&lt;/b&gt;) for respective details:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;For your workload,&#160;&lt;b&gt;please run explain(true)&lt;/b&gt;&#160;on the exact query/queries you are experiencing high response time issues with to&#160;&lt;a href=&quot;https://www.mongodb.com/docs/manual/reference/method/cursor.explain/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;get additional ExecutionStats information.&lt;/a&gt;&#160;The default mode is &quot;queryPlanner&quot;. MongoDB interprets true as &quot;allPlansExecution&quot; and false as &quot;queryPlanner&quot;. This will help narrow down possible reasons for the regression in response time.&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; As there is our app functionality which is immediately updating and deleting records, those queries won&apos;t be present if we try to find it manually and&#160; we did testing in different setups and queries are not same in all cases but if still it is required to collect info for the same , we can provide it by recreating the issue.&lt;/b&gt;&#160;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;The&#160;&lt;b&gt;logs FTDC of your PRIMARY node on 4.0&lt;/b&gt;&#160;running the workload you are mentioning (so we can compare with the 4.2 data you submitted)&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; I have uploaded FTDC for the same on upload portal --&amp;gt; file name: mongo-4.0.27-PRIMARY.tar.gz&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;Provide some reproduction steps so we can also verify this independently:&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt;&lt;/b&gt; I have already added repro steps in comment section. Kindly check &quot;&lt;b&gt;Troubleshooting performed&lt;/b&gt;&quot; section in first comment.&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;
	&lt;ul&gt;
		&lt;li&gt;What is the shape of your current documents in this case?&#160;&lt;b&gt;Can you provide a sample document?&lt;/b&gt;&lt;b&gt;{&lt;/b&gt;}&lt;/li&gt;
	&lt;/ul&gt;
	&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; Please find the mongo stat output for the primary member.&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;insert query update delete getmore command&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;22 &#160; &#160;205 &#160; &#160; 58 &#160; &#160; 718 &#160; 841|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;12 &#160; &#160;219 &#160; &#160; 86 &#160; &#160; 769 &#160; 880|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;21 &#160; &#160;200 &#160; &#160; 73 &#160; &#160; 651 &#160; 794|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;20 &#160; &#160;219 &#160; &#160; 74 &#160; &#160; 696 &#160; 857|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;20 &#160; &#160;215 &#160; &#160; 68 &#160; &#160; 723 &#160; 848|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;19 &#160; &#160;212 &#160; &#160; 62 &#160; &#160; 741 &#160; 867|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;18 &#160; &#160;211 &#160; &#160; 57 &#160; &#160; 715 &#160; 833|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;15 &#160; &#160;215 &#160; &#160; 67 &#160; &#160; 790 &#160; 902|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;17 &#160; &#160;228 &#160; &#160; 66 &#160; &#160; 740 &#160; 886|0&lt;/b&gt;&#160;&lt;br/&gt;
&#160; &#160; *&lt;b&gt;0 &#160; &#160;23 &#160; &#160;206 &#160; &#160; 66 &#160; &#160; 726 &#160; 855|0&lt;/b&gt;&#160;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Sample document:&lt;/b&gt;&#160;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;rs-shard-ipv6-2:PRIMARY&amp;gt; db.drasessions.findOne()&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;{&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;_id&quot; : {&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;sessionid&quot; : &quot;ClpGx0:172.16.241.111:5933:1654866756:0151474820&quot;&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;ts&quot; : NumberLong(&quot;1656402314141&quot;),&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;srk&quot; : &quot;server101.sitebstandalone&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;originHost&quot; : &quot;site-b-client-calipers50-gx.pcef.gx-a&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;originRealm&quot; : &quot;client-consumer.calipers.pcef.gx&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;destHost&quot; : &quot;site-b-server-calipers101-gx.pcef.gx-8&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;destRealm&quot; : &quot;server-consumer.calipers.pcef.gx&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;apn&quot; : &quot;phone&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;imsi&quot; : &quot;580034821&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;ipv4&quot; : &quot;51.0.18.214&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;ipv6&quot; : &quot;6206:ae00:0000:12d5&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;msisdn&quot; : &quot;83404821&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;nextEvalTime&quot; : ISODate(&quot;2022-06-29T08:45:14.141Z&quot;),&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;staleSessionExpiryCount&quot; : 3,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;staleBindingRefreshTime&quot; : ISODate(&quot;2022-07-18T07:45:14.141Z&quot;),&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;sessionid&quot; : &quot;ClpGx0:172.16.241.111:5933:1654866756:0151474820&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;systemId&quot; : &quot;vpas-system-2&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;uuid&quot; : &quot;vpas-system-2252020078&quot;&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;}&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;rs-shard-ipv6-2:PRIMARY&amp;gt; use ipv6bindings_1&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;switched to db ipv6bindings_1&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;rs-shard-ipv6-2:PRIMARY&amp;gt; db.ipv6bindings.findOne()&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;{&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;_id&quot; : &quot;3101:0000:0002:dd53&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;ts&quot; : NumberLong(&quot;1656483427273&quot;),&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;staleBindingExpiryTime&quot; : ISODate(&quot;2022-07-09T06:17:07.274Z&quot;),&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;srk&quot; : &quot;serverb1.relay&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;fqdn&quot; : &quot;site-b-client-calipers21-gx.pcef.gx1&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;sessionid&quot; : &quot;ClpGx0:172.16.241.111:5021:1654866656:0159487757&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;uuid&quot; : &quot;vpas-system-21671833548&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;&quot;systemId&quot; : &quot;vpas-system-2&quot;&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;}&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;
	&lt;ul&gt;
		&lt;li&gt;Do you have an index on this collection?&lt;/li&gt;
	&lt;/ul&gt;
	&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; yes, please find the index details given below:&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Index details for&lt;/b&gt; &#160;&lt;b&gt;drasessions collection in&lt;/b&gt;&#160;&lt;b&gt;drasessions_1 db:&lt;/b&gt;&lt;b&gt;{&lt;/b&gt;}&lt;b&gt;{&lt;/b&gt;}&lt;/p&gt;

&lt;p&gt;&lt;b&gt;rs-shard-ipv6-2:PRIMARY&amp;gt; db.drasessions.getIndexes()&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;[&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;{&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;v&quot; : 2,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;key&quot; : {&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;systemId&quot; : 1,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;nextEvalTime&quot; : 1&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;name&quot; : &quot;systemId_1_nextEvalTime_1&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;ns&quot; : &quot;drasessions_1.drasessions&quot;&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;{&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;v&quot; : 2,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;key&quot; : {&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;_id&quot; : 1&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;name&quot; : &quot;&lt;em&gt;id&lt;/em&gt;&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;ns&quot; : &quot;drasessions_1.drasessions&quot;&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;}&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;]&lt;/b&gt;&lt;br/&gt;
&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Index details for&lt;/b&gt; &#160;&lt;b&gt;ipv6bindings&lt;/b&gt;&lt;b&gt;{&lt;/b&gt;} &lt;b&gt;collection in&lt;/b&gt; &lt;b&gt;ipv6bindings_1&lt;/b&gt;&lt;b&gt;{&lt;/b&gt;}&#160;&lt;b&gt;db:&lt;/b&gt;&#160;**&#160;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;rs-shard-ipv6-2:PRIMARY&amp;gt; db.ipv6bindings.getIndexes()&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;[&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;{&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;v&quot; : 2,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;key&quot; : {&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;systemId&quot; : 1,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;staleBindingExpiryTime&quot; : 1&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;name&quot; : &quot;systemId_1_staleBindingExpiryTime_1&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;ns&quot; : &quot;ipv6bindings_1.ipv6bindings&quot;&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;{&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;v&quot; : 2,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;key&quot; : {&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;source&quot; : 1&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;name&quot; : &quot;source_1&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;ns&quot; : &quot;ipv6bindings_1.ipv6bindings&quot;&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;{&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;v&quot; : 2,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;key&quot; : {&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;_id&quot; : 1&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;},&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;name&quot; : &quot;&lt;em&gt;id&lt;/em&gt;&quot;,&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &lt;b&gt;&quot;ns&quot; : &quot;ipv6bindings_1.ipv6bindings&quot;&lt;/b&gt;&lt;br/&gt;
&#160; &#160; &#160; &#160; &lt;b&gt;}&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;]&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;
	&lt;ul&gt;
		&lt;li&gt;Is this running on a sharded collection? If so, is this the primary shard of the collection?&lt;/li&gt;
	&lt;/ul&gt;
	&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; yeah, we are using sharded collections which is storing shard information and actual data is stored on shards (as a replica set) for which we provided logs(primary and secondary&apos;s).&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;
	&lt;ul&gt;
		&lt;li&gt;Do you observe this latency running the command on mongos or directly on the node? (Or both?)&lt;/li&gt;
	&lt;/ul&gt;
	&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;&amp;lt;R&amp;gt; Not using mongos, using sharding db. Latency is observed on the shards(nodes).&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;I don&apos;t notice consistent spikes in query response time beyond the exact timestamp you mentioned (2022-06-10 20:03 UTC). Is this issue sporadic or is it always reproducible with a specific query? I notice you have constant workloads that don&apos;t have this latency normally, and I&apos;m curious what the difference is between the queries you are triggering the latency with versus not triggering it with.&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160;&lt;b&gt;&amp;lt;R&amp;gt; we observed 2 more instances for spikes but for those instance logs were not added as we got next occurrence &#160;after 28 hours: details are added in first comment of &quot;&lt;/b&gt;&lt;b&gt;Additional info&lt;/b&gt;&lt;b&gt;&quot; section.&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Issue is reproducible but it is coming at random interval.&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Issue is not coming for specific queries as we also checked in different setups.&lt;/b&gt;&lt;/p&gt;</comment>
                            <comment id="4643065" author="JIRAUSER1265262" created="Mon, 27 Jun 2022 19:47:53 +0000"  >&lt;p&gt;Kapil,&lt;/p&gt;

&lt;p&gt;Thanks for your patience. After looking through your FTDC, I can observe the slow queries taking place specifically on your primary. I am not observing anything else really out of the ordinary on your secondary nodes. However, I don&apos;t have FTDC to show what your primary was behaving like before you upgraded to 4.2, so I don&apos;t really have much to compare against - and there&apos;s nothing immediately obvious as a culprit just yet.&lt;/p&gt;

&lt;p&gt;It looks like this is happening to multiple queries and not just the one you ran explain() on. I&apos;d like to dive deeper into that to get more information.&lt;/p&gt;
&lt;p/&gt;
&lt;div id=&quot;syntaxplugin&quot; class=&quot;syntaxplugin&quot; style=&quot;border: 1px dashed #bbb; border-radius: 5px !important; overflow: auto; max-height: 30em;&quot;&gt;
&lt;table cellspacing=&quot;0&quot; cellpadding=&quot;0&quot; border=&quot;0&quot; width=&quot;100%&quot; style=&quot;font-size: 1em; line-height: 1.4em !important; font-weight: normal; font-style: normal; color: black;&quot;&gt;
		&lt;tbody &gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;  margin-top: 10px;   width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;2022&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;-&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;06&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;-10T20:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;03&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;52.009&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;+&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0000&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; I 
&#160;WRITE &#160; &#160;[conn357] remove ipv6bindings_1.ipv6bindings command: { q: { _id: &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;6206:ae00:0000:b06b&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, sessionid: &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;ClpGx1:172.16.241.111:5934:1654866756:0002385157&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; }, limit: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } planSummary: IXSCAN { _id: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } keysExamined:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: black; font-family: 
&apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; docsExamined:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; ndeleted:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; numYields:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; queryHash:E3DFA639 planCacheKey:5384A64B locks:{ ParallelBatchWriterMode: { acquireCount: { r: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, ReplicationStateTransition: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace 
!important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Global: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Database: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Collection: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Mutex: { acquireCount: { r: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } } } flowControl:{ acquireCount: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans 
Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, timeAcquiringMicros: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } storage:{} 3461ms&lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
				&lt;tr id=&quot;syntaxplugin_code_and_gutter&quot;&gt;
						&lt;td  style=&quot; line-height: 1.4em !important; padding: 0em; vertical-align: top;&quot;&gt;
					&lt;pre style=&quot;font-size: 1em; margin: 0 10px;   margin-bottom: 10px;  width: auto; padding: 0;&quot;&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;2022&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;-&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;06&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;-10T20:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;03&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;52.009&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;+&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0000&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace 
!important;&quot;&gt; I &#160;WRITE &#160; &#160;[conn264] remove ipv6bindings_1.ipv6bindings command: { q: { _id: &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;6206:ae00:0000:3b51&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, sessionid: &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;ClpGx0:172.16.241.111:5933:1654866756:0002385193&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; }, limit: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } planSummary: IXSCAN { _id: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } keysExamined:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: 
black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; docsExamined:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; ndeleted:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; numYields:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; queryHash:E3DFA639 planCacheKey:5384A64B locks:{ ParallelBatchWriterMode: { acquireCount: { r: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, ReplicationStateTransition: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace 
!important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Global: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Database: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Collection: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Mutex: { acquireCount: { r: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } } } flowControl:{ acquireCount: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans 
Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } storage:{} 3317ms2022-&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;06&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;-10T20:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;03&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;52.016&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;+&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0000&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; I &#160;COMMAND &#160;[conn52] command ipv6bindings_1.ipv6bindings command: find { find: &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream 
Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;ipv6bindings&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, filter: { _id: &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;3101:0000:0009:11c1&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; }, limit: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, singleBatch: &lt;/span&gt;&lt;span style=&quot;color: #006699; font-weight: bold; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;true&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, $db: &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;ipv6bindings_1&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, $clusterTime: { clusterTime: Timestamp(&lt;/span&gt;&lt;span style=&quot;color: 
#009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1654891428&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;246&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;), signature: { hash: BinData(&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;0&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;, 1AE068B7041E0DF364A48B3B76192706D425B50D), keyId: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;7072624733400858628&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, lsid: { id: UUID(&lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;55caafc8-fbb1-4ede-a1d8-dd788fadb341&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace 
!important;&quot;&gt;) }, $readPreference: { mode: &lt;/span&gt;&lt;span style=&quot;color: blue; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;&quot;nearest&quot;&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } } planSummary: IDHACK keysExamined:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; docsExamined:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; cursorExhausted:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; numYields:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace 
!important;&quot;&gt; nreturned:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; reslen:&lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;520&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; locks:{ ReplicationStateTransition: { acquireCount: { w: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;2&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Global: { acquireCount: { r: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;2&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Database: { acquireCount: { r: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;2&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier 
New&apos;, Courier, monospace !important;&quot;&gt; } }, Collection: { acquireCount: { r: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;2&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } }, Mutex: { acquireCount: { r: &lt;/span&gt;&lt;span style=&quot;color: #009900; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt;1&lt;/span&gt;&lt;span style=&quot;color: black; font-family: &apos;Consolas&apos;, &apos;Bitstream Vera Sans Mono&apos;, &apos;Courier New&apos;, Courier, monospace !important;&quot;&gt; } } } storage:{} protocol:op_msg 3441ms &lt;/span&gt;&lt;/pre&gt;
			&lt;/td&gt;
		&lt;/tr&gt;
			&lt;/tbody&gt;
&lt;/table&gt;
&lt;/div&gt;
&lt;p/&gt;
&lt;p&gt;To look into what is exactly happening, and to better illustrate the regression taking place, can you please provide the following:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;For your workload, &lt;b&gt;please run explain(true)&lt;/b&gt; on the exact query/queries you are experiencing high response time issues with to &lt;a href=&quot;https://www.mongodb.com/docs/manual/reference/method/cursor.explain/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;get additional ExecutionStats information.&lt;/a&gt; The default mode is &quot;queryPlanner&quot;. MongoDB interprets true as &quot;allPlansExecution&quot; and false as &quot;queryPlanner&quot;. This will help narrow down possible reasons for the regression in response time.&lt;/li&gt;
	&lt;li&gt;The &lt;b&gt;logs and FTDC of your PRIMARY node on 4.0&lt;/b&gt; running the workload you are mentioning (so we can compare with the 4.2 data you submitted)&lt;/li&gt;
	&lt;li&gt;Provide some reproduction steps so we can also verify this independently:
	&lt;ul&gt;
		&lt;li&gt;What is the shape of your current documents in this case? &lt;b&gt;Can you provide a sample document?&lt;/b&gt;&lt;/li&gt;
		&lt;li&gt;Do you have an index on this collection?&lt;/li&gt;
		&lt;li&gt;Is this running on a sharded collection? If so, is this the primary shard of the collection?&lt;/li&gt;
		&lt;li&gt;Do you observe this latency running the command on mongos or directly on the node? (Or both?)&lt;/li&gt;
	&lt;/ul&gt;
	&lt;/li&gt;
	&lt;li&gt;I don&apos;t notice consistent spikes in query response time beyond the exact timestamp you mentioned (2022-06-10 20:03 UTC). Is this issue sporadic or is it always reproducible with a specific query? I notice you have constant workloads that don&apos;t have this latency normally, and I&apos;m curious what the difference is between the queries you are triggering the latency with versus not triggering it with.&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;I will also look into this further as well.&lt;/p&gt;

&lt;p&gt;Christopher&lt;/p&gt;</comment>
                            <comment id="4640751" author="JIRAUSER1264730" created="Mon, 27 Jun 2022 02:36:24 +0000"  >&lt;p&gt;Hi Christopher,&lt;/p&gt;

&lt;p&gt;Could you please provide us any temporary WA for this issue, as we have a release planned soon (we have requested from our organization (Cisco) email as well)? It would be really helpful for us, as we are blocked for the release.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4637320" author="JIRAUSER1264730" created="Fri, 24 Jun 2022 04:48:49 +0000"  >&lt;p&gt;Hi Christopher,&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;Mongo logs on 4.0&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;Logs for 4.0.27 are attached in same zip file as out of 4 data bearing members 2 are from 4.0.27, please find the details given below for 4.0.27 members:&lt;/p&gt;

&lt;p&gt;&lt;b&gt;mongo-27029-SECONDARY_8a.tar.gz&lt;/b&gt;&lt;br/&gt;
&lt;b&gt;mongo-27029-SECONDARY_8b.tar.gz&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;query plan&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;please find query plan for both 4.0.27 and 4.2.20 given below:&lt;/p&gt;

&lt;p&gt;&lt;b&gt;4.0.27:&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;rs-shard-ipv6-2:SECONDARY&amp;gt; db.drasessions.find().explain()&lt;br/&gt;
{&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;queryPlanner&quot; : {&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;plannerVersion&quot; : 1,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;namespace&quot; : &quot;drasessions_1.drasessions&quot;,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;indexFilterSet&quot; : false,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;parsedQuery&quot; : &lt;/p&gt;
{

&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; },&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;winningPlan&quot; : {
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;stage&quot; : &quot;COLLSCAN&quot;,
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;direction&quot; : &quot;forward&quot;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; },&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;rejectedPlans&quot; : [ ]&lt;br/&gt;
&#160; &#160; &#160; &#160; },&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;serverInfo&quot; : {
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;host&quot; : &quot;mongo-s109&quot;,
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;port&quot; : 27029,
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;version&quot; : &quot;4.0.27&quot;,
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;gitVersion&quot; : &quot;d47b151b55f286546e7c7c98888ae0577856ca20&quot;
&#160; &#160; &#160; &#160; },&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;ok&quot; : 1,&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;operationTime&quot; : Timestamp(1656045744, 205),&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;$clusterTime&quot; : {&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;clusterTime&quot; : Timestamp(1656045744, 205),&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;signature&quot; : {
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;hash&quot; : BinData(0,&quot;AZMjfFZeJm0AtPqm+P1EKDCoj8Q=&quot;),
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;keyId&quot; : NumberLong(&quot;7072624733400858628&quot;)
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; }&lt;br/&gt;
&#160; &#160; &#160; &#160; }&lt;br/&gt;
}&lt;br/&gt;
&lt;br/&gt;
&#160;&lt;br/&gt;
&lt;br/&gt;
&lt;b&gt;4.2.20:&lt;/b&gt;&lt;br/&gt;
&lt;br/&gt;
rs-shard-ipv6-2:PRIMARY&amp;gt; db.drasessions.find().explain()&lt;br/&gt;
{&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;queryPlanner&quot; : {&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;plannerVersion&quot; : 1,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;namespace&quot; : &quot;drasessions_1.drasessions&quot;,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;indexFilterSet&quot; : false,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;parsedQuery&quot; : {

&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; }
&lt;p&gt;,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;queryHash&quot; : &quot;8B3D4AB8&quot;,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;planCacheKey&quot; : &quot;8B3D4AB8&quot;,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;winningPlan&quot; : &lt;/p&gt;
{
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;stage&quot; : &quot;COLLSCAN&quot;,
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;direction&quot; : &quot;forward&quot;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; }
&lt;p&gt;,&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;rejectedPlans&quot; : [ ]&lt;br/&gt;
&#160; &#160; &#160; &#160; },&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;serverInfo&quot; : &lt;/p&gt;
{
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;host&quot; : &quot;mongo-s109&quot;,
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;port&quot; : 27029,
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;version&quot; : &quot;4.2.18&quot;,
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;gitVersion&quot; : &quot;f65ce5e25c0b26a00d091a4d24eec1a8b3a4c016&quot;
&#160; &#160; &#160; &#160; }
&lt;p&gt;,&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;ok&quot; : 1,&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;$clusterTime&quot; : {&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;clusterTime&quot; : Timestamp(1656045392, 89),&lt;br/&gt;
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;signature&quot; : &lt;/p&gt;
{
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;hash&quot; : BinData(0,&quot;ZLZuuzhWqgu39YINJXNZ1aspBds=&quot;),
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; &quot;keyId&quot; : NumberLong(&quot;7072624733400858628&quot;)
&#160; &#160; &#160; &#160; &#160; &#160; &#160; &#160; }
&lt;p&gt;&#160; &#160; &#160; &#160; },&lt;br/&gt;
&#160; &#160; &#160; &#160; &quot;operationTime&quot; : Timestamp(1656045392, 89)&lt;br/&gt;
}&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4636152" author="JIRAUSER1265262" created="Thu, 23 Jun 2022 18:08:28 +0000"  >&lt;p&gt;Hi Kapil,&lt;/p&gt;

&lt;p&gt;Thank you for your patience! To look into this further, could you also provide the following from your testing:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;Mongo logs on 4.0 as well as 4.2 (you submitted 4.2 already)&lt;/li&gt;
	&lt;li&gt;Add $explain to your queries so we can examine the query plan being used&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;Christopher&lt;/p&gt;</comment>
                            <comment id="4627968" author="JIRAUSER1264730" created="Tue, 21 Jun 2022 05:34:58 +0000"  >&lt;p&gt;Hi Chris,&lt;/p&gt;

&lt;p&gt;Gentle Reminder! did you get something from logs?&lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4618232" author="JIRAUSER1264730" created="Wed, 15 Jun 2022 16:24:45 +0000"  >&lt;p&gt;Hi Chris,&lt;/p&gt;

&lt;p&gt;I have uploaded file(&lt;b&gt;mongo-diag-log.tar.gz&lt;/b&gt;- file structure is provided in description) on upload portal.&lt;/p&gt;

&lt;p&gt;Kindly look into it.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Thanks,&lt;/p&gt;

&lt;p&gt;Kapil&lt;/p&gt;</comment>
                            <comment id="4618047" author="JIRAUSER1265262" created="Wed, 15 Jun 2022 15:36:52 +0000"  >&lt;p&gt;Hi Kapil,&lt;/p&gt;

&lt;p&gt;I&apos;ve created a secure &lt;a href=&quot;https://amphora.corp.mongodb.com/public/upload/eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJmb2xkZXJfaWQiOiIxNjUzNDUxMDE0MDciLCJleHAiOjE2NTc4OTkzMTZ9.NRTcZlzZ20zUf_rxMh0H6E6_cT5RdJeXU-9DSUCf9SQ&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;upload portal&lt;/a&gt; for you. Files uploaded to this portal are hosted on Box, are visible only to MongoDB employees, and are routinely deleted after some time.&lt;/p&gt;

&lt;p&gt;For each node in the replica set spanning a time period that includes the incident, would you please archive (tar or zip) and upload to that link:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;the mongod logs&lt;/li&gt;
	&lt;li&gt;the &lt;tt&gt;$dbpath/diagnostic.data&lt;/tt&gt; directory (the contents are described &lt;a href=&quot;https://docs.mongodb.com/manual/administration/analyzing-mongodb-performance/#full-time-diagnostic-data-capture&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;here&lt;/a&gt;)&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;Let us know when you&apos;ve gone ahead and submitted your logs.&lt;/p&gt;

&lt;p&gt;Regards,&lt;br/&gt;
Christopher&lt;br/&gt;
&#160;&lt;/p&gt;</comment>
                            <comment id="4609316" author="JIRAUSER1264730" created="Sun, 12 Jun 2022 05:26:17 +0000"  >&lt;p&gt;&lt;b&gt;Troubleshooting performed:&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;We stopped and restarted the load, and did not find high query response.&lt;/li&gt;
	&lt;li&gt;We stopped and started all mongo instances and then did load testing, and did not find high query response.&#160;&#160;&lt;/li&gt;
	&lt;li&gt;&lt;b&gt;Repro steps more details:&lt;/b&gt; High query responses are seen only on the VM which we rebooted (error frequency is not fixed). We performed the test in a different setup also; whichever VM we are rebooting, high query responses are seen from that VM&apos;s mongo Primary member.&lt;/li&gt;
	&lt;li&gt;We performed same tests with full load and 50% of full load and 25 % of full load and observed high query response in all cases.&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;Additional info:&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;VM reboot time: &lt;b&gt;2022-06-10T14:50:25&lt;/b&gt;&lt;/li&gt;
	&lt;li&gt;High query response is increased (~ &lt;b&gt;6 sec&lt;/b&gt;) in second occurrence given below:&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;2022-06-11T18:41:56(second occurrence)&lt;/b&gt;.312+0000 I &#160;COMMAND &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn59&amp;#93;&lt;/span&gt; command ipv6bindings_1.ipv6bindings command: find { find: &quot;ipv6bindings&quot;, filter:&lt;/p&gt;

{ _id: &quot;3101:0000:0003:94da&quot; }

&lt;p&gt;, limit: 1, singleBatch: true, $db: &quot;ipv6bindings_1&quot;, $clusterTime: { clusterTime: Timestamp(1654972910, 279), signature:{ hash: BinData(0, 8E670E972822B0FF4FB23BD426B9847B6A7521F4), keyId: 7072624733400858628 }}, lsid: { id: UUID(&quot;0aea1d8a-3134-4b7c-8978-6861a3a16626&quot;) }, $readPreference: { mode: &quot;nearest&quot; } } planSummary: IDHACK keysExamined:1 docsExamined:1 cursorExhausted:1 numYields:1 nreturned:1 reslen:519 locks:{ ReplicationStateTransition: { acquireCount:{ w: 2 }}, Global: { acquireCount:{ r: 2 }}, Database: { acquireCount:{ r: 2 }}, Collection: { acquireCount:{ r: 2 }}, Mutex: { acquireCount:{ r: 1 }} } storage:{} protocol:op_msg &lt;b&gt;5926ms&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;High query response is decreased (~ 1&#160;&lt;b&gt;sec&lt;/b&gt;) in third occurrence given below:&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&lt;b&gt;2022-06-11T19:51:42(third occurrence)&lt;/b&gt;.497+0000 I &#160;WRITE &#160; &#160;&lt;span class=&quot;error&quot;&gt;&amp;#91;conn85&amp;#93;&lt;/span&gt; update drasessions_1.drasessions command: { q: { _id:{ sessionid: &quot;ClpGx1:172.16.241.111:5022:1654866656:0010848441&quot; }}, u: { $set:{ nextEvalTime: new Date(1655067101292), staleSessionExpiryCount: 3 }}, multi: false, upsert: false } planSummary: IDHACK keysExamined:1 docsExamined:1 nMatched:1 nModified:1 keysInserted:1 keysDeleted:1 numYields:1 locks:{ ParallelBatchWriterMode: { acquireCount:{ r: 2 }}, ReplicationStateTransition: { acquireCount:{ w: 2 }}, Global: {acquireCount:{ w: 2 }}, Database: { acquireCount:{ w: 2 }}, Collection: { acquireCount:{ w: 2 }}, Mutex: { acquireCount:r: 2 }} } flowControl:{ acquireCount: 2, timeAcquiringMicros: 1 } storage:{} &lt;b&gt;1203ms&lt;/b&gt;&lt;/p&gt;

&lt;p&gt;&lt;b&gt;Note:&lt;/b&gt; High query response are coming in random intervals and varying from 1 sec to 6 sec.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10012">
                    <name>Related</name>
                                            <outwardlinks description="related to">
                                        <issuelink>
            <issuekey id="2235698">SERVER-72978</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                            <attachment id="386854" name="image-2022-06-27-02-18-16-686.png" size="66655" author="chris.kelly@mongodb.com" created="Mon, 27 Jun 2022 06:18:18 +0000"/>
                            <attachment id="386855" name="image-2022-06-27-02-23-01-877.png" size="56471" author="chris.kelly@mongodb.com" created="Mon, 27 Jun 2022 06:23:03 +0000"/>
                            <attachment id="386856" name="image-2022-06-27-02-25-35-724.png" size="52923" author="chris.kelly@mongodb.com" created="Mon, 27 Jun 2022 06:25:37 +0000"/>
                            <attachment id="386857" name="image-2022-06-27-02-26-11-985.png" size="71089" author="chris.kelly@mongodb.com" created="Mon, 27 Jun 2022 06:26:13 +0000"/>
                            <attachment id="386859" name="image-2022-06-27-02-34-24-202.png" size="101955" author="chris.kelly@mongodb.com" created="Mon, 27 Jun 2022 06:34:26 +0000"/>
                            <attachment id="386860" name="image-2022-06-27-02-34-36-415.png" size="99384" author="chris.kelly@mongodb.com" created="Mon, 27 Jun 2022 06:34:37 +0000"/>
                            <attachment id="386864" name="image-2022-06-27-02-50-27-830.png" size="101742" author="chris.kelly@mongodb.com" created="Mon, 27 Jun 2022 06:50:29 +0000"/>
                            <attachment id="387777" name="image-2022-06-30-06-21-23-457.png" size="27610" author="chris.kelly@mongodb.com" created="Thu, 30 Jun 2022 10:21:23 +0000"/>
                            <attachment id="387780" name="image-2022-06-30-06-26-01-013.png" size="70311" author="chris.kelly@mongodb.com" created="Thu, 30 Jun 2022 10:26:01 +0000"/>
                            <attachment id="387779" name="image-2022-06-30-06-28-57-244.png" size="32008" author="chris.kelly@mongodb.com" created="Thu, 30 Jun 2022 10:28:57 +0000"/>
                            <attachment id="389200" name="image-2022-07-08-09-23-16-585.png" size="188277" author="chris.kelly@mongodb.com" created="Fri, 8 Jul 2022 13:23:16 +0000"/>
                            <attachment id="389202" name="image-2022-07-08-09-33-45-541.png" size="56625" author="chris.kelly@mongodb.com" created="Fri, 8 Jul 2022 13:33:45 +0000"/>
                            <attachment id="389205" name="image-2022-07-08-09-43-02-073.png" size="13823" author="chris.kelly@mongodb.com" created="Fri, 8 Jul 2022 13:43:02 +0000"/>
                            <attachment id="389239" name="image-2022-07-08-10-24-12-094.png" size="87171" author="chris.kelly@mongodb.com" created="Fri, 8 Jul 2022 14:24:12 +0000"/>
                            <attachment id="389238" name="image-2022-07-08-10-24-15-944.png" size="87171" author="chris.kelly@mongodb.com" created="Fri, 8 Jul 2022 14:24:16 +0000"/>
                            <attachment id="399468" name="image-2022-09-01-15-09-05-589.png" size="55609" author="chris.kelly@mongodb.com" created="Thu, 1 Sep 2022 19:09:05 +0000"/>
                            <attachment id="399469" name="image-2022-09-01-15-09-39-664.png" size="72722" author="chris.kelly@mongodb.com" created="Thu, 1 Sep 2022 19:09:40 +0000"/>
                            <attachment id="399472" name="image-2022-09-01-15-15-42-817.png" size="87154" author="chris.kelly@mongodb.com" created="Thu, 1 Sep 2022 19:15:43 +0000"/>
                            <attachment id="384187" name="rs.status.txt" size="10837" author="kg3634@gmail.com" created="Sat, 11 Jun 2022 07:30:12 +0000"/>
                            <attachment id="387775" name="screenshot-1.png" size="23018" author="chris.kelly@mongodb.com" created="Thu, 30 Jun 2022 10:16:31 +0000"/>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                <customfield id="customfield_10050" key="com.atlassian.jira.toolkit:comments">
                        <customfieldname># Replies</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>22.0</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                <customfield id="customfield_10055" key="com.atlassian.jira.ext.charting:firstresponsedate">
                        <customfieldname>Date of 1st Reply</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>Wed, 15 Jun 2022 15:36:52 +0000</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10052" key="com.atlassian.jira.toolkit:dayslastcommented">
                        <customfieldname>Days since reply</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1 year, 6 weeks, 6 days ago</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_18254" key="com.onresolve.jira.groovy.groovyrunner:scripted-field">
                        <customfieldname>Dependencies</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue><![CDATA[]]></customfieldvalue>


                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_15850" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                    <customfield id="customfield_10057" key="com.atlassian.jira.toolkit:lastusercommented">
                        <customfieldname>Last comment by Customer</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>true</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10056" key="com.atlassian.jira.toolkit:lastupdaterorcommenter">
                        <customfieldname>Last commenter</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>kg3634@gmail.com</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_11151" key="com.atlassian.jira.toolkit:LastCommentDate">
                        <customfieldname>Last public comment date</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1 year, 6 weeks, 6 days ago</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                    <customfield id="customfield_10032" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Operating System</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10026"><![CDATA[ALL]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                <customfield id="customfield_10051" key="com.atlassian.jira.toolkit:participants">
                        <customfieldname>Participants</customfieldname>
                        <customfieldvalues>
                                        <customfieldvalue>chris.kelly@mongodb.com</customfieldvalue>
            <customfieldvalue>kg3634@gmail.com</customfieldvalue>
    
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                        <customfield id="customfield_14254" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Product Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i0y89z:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                <customfield id="customfield_12550" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>2|i04yyj:y</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10558" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_23361" key="com.onresolve.jira.groovy.groovyrunner:scripted-field">
                        <customfieldname>Requested By</customfieldname>
                        <customfieldvalues>
                                

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                            <customfield id="customfield_10750" key="com.atlassian.jira.plugin.system.customfieldtypes:textarea">
                        <customfieldname>Steps To Reproduce</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>&lt;p&gt;observed high query response after 6 hours of rebooting of one DB VM.&lt;/p&gt;</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                    <customfield id="customfield_10053" key="com.atlassian.jira.ext.charting:timeinstatus">
                        <customfieldname>Time In Status</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_22870" key="com.onresolve.jira.groovy.groovyrunner:scripted-field">
                        <customfieldname>Triagers</customfieldname>
                        <customfieldvalues>
                                    <customfieldvalue><![CDATA[chris.kelly@mongodb.com]]></customfieldvalue>
    

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                    <customfield id="customfield_14350" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>serverRank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i0xufb:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                    </customfields>
    </item>
</channel>
</rss>