<!-- 
RSS generated by JIRA (9.7.1#970001-sha1:2222b88b221c4928ef0de3161136cc90c8356a66) at Thu Feb 08 09:05:39 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>MongoDB Jira</title>
    <link>https://jira.mongodb.org</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.7.1</version>
        <build-number>970001</build-number>
        <build-date>13-04-2023</build-date>
    </build-info>


<item>
            <title>[KAFKA-127] Kafka Source connector handling documents greater than 16MB BSON</title>
                <link>https://jira.mongodb.org/browse/KAFKA-127</link>
                <project id="16285" key="KAFKA">Kafka Connector</project>
                    <description>&lt;p&gt;Change stream response documents must adhere to the 16MB BSON :&#160;&lt;a href=&quot;https://docs.mongodb.com/manual/administration/change-streams-production-recommendations/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://docs.mongodb.com/manual/administration/change-streams-production-recommendations/&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Currently Kafka connector:&lt;/p&gt;

&lt;p&gt;1) Doesn&apos;t create an error or exception but has an info message:&lt;/p&gt;

&lt;p&gt;&lt;tt&gt;&#160;INFO An exception occurred when trying to get the next item from the changestream. (com.mongodb.kafka.connect.source.MongoSourceTask)&#160;INFO An exception occurred when trying to get the next item from the changestream. (com.mongodb.kafka.connect.source.MongoSourceTask)kfc-mongodb-7bbc79cb64-hdddg kfc-mongodb 2020-07-09T16:29:47.376320977Z com.mongodb.MongoCommandException: Command failed with error 10334 (Location10334): &apos;BSONObj size: 19376544 (0x127A9A0) is invalid. Size must be between 0 and 16793600(16MB) First element: _id: { _data: BinData(0, 825F0745F700000005461E5F69640031035342000004BC005A100458C65EB8491243558BB64BDAC2914E5204), _typeBits: BinData(0, 02) }&apos; on server db-core-1.ebs.us-east-1.stage-us.int.evbg.io:29001. The full response is {&quot;operationTime&quot;: {&quot;$timestamp&quot;: {&quot;t&quot;: 1594312187, &quot;i&quot;: 87&lt;/tt&gt;, &quot;ok&quot;: 0.0, &quot;errmsg&quot;: &quot;BSONObj size: 19376544 (0x127A9A0) is invalid. Size must be between 0 and 16793600(16MB) First element: _id: { _data: BinData(0, 825F0745F700000005461E5F69640031035342000004BC005A100458C65EB8491243558BB64BDAC2914E5204), _typeBits: BinData(0, 02) }&quot;, &quot;code&quot;: 10334, &quot;codeName&quot;: &quot;Location10334&quot;, &quot;$clusterTime&quot;: {&quot;clusterTime&quot;: {&quot;$timestamp&quot;: {&quot;t&quot;: 1594312187, &quot;i&quot;: 87}}, &quot;signature&quot;: {&quot;hash&quot;: &lt;/p&gt;
{&quot;$binary&quot;: &quot;WPjr3+K8zh6NDOpkTmK6rNvmM4M=&quot;, &quot;$type&quot;: &quot;00&quot;}
&lt;p&gt;, &quot;keyId&quot;: {&quot;$numberLong&quot;: &quot;6828579125364523010&quot;}}}}}}&lt;/p&gt;

&lt;p&gt;2. Task status is still running but it fails to process change stream&lt;/p&gt;

&lt;p&gt;&lt;tt&gt;kfc-mongodb-7bbc79cb64-hdddg kfc-mongodb 2020-07-09T15:59:57.191098599Z &lt;span class=&quot;error&quot;&gt;&amp;#91;2020-07-09 15:59:57,190&amp;#93;&lt;/span&gt; INFO Failed to resume change stream: BSONObj size: 19224166 (0x1255666) is invalid. Size must be between 0 and 16793600(16MB) First element: _id: { _data: BinData(0, 825F073D080000000E461E5F69640031035342000004BC005A100458C65EB8491243558BB64BDAC2914E5204), _typeBits: BinData(0, 02) } 10334 (com.mongodb.kafka.connect.source.MongoSourceTask)&lt;/tt&gt;&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Regarding error handling, this seems related to&#160;&lt;a href=&quot;https://jira.mongodb.org/browse/KAFKA-89&quot; class=&quot;external-link&quot; rel=&quot;nofollow&quot;&gt;https://jira.mongodb.org/browse/KAFKA-89&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Is there a way to have the connector resilient to these issues with improved error handling and task status. Thanks in advance.&lt;/p&gt;</description>
                <environment>Kafka Connector 1.1&lt;br/&gt;
Mongo DB:3.6</environment>
        <key id="1406811">KAFKA-127</key>
            <summary>Kafka Source connector handling documents greater than 16MB BSON</summary>
                <type id="1" iconUrl="https://jira.mongodb.org/secure/viewavatar?size=xsmall&amp;avatarId=14703&amp;avatarType=issuetype">Bug</type>
                                            <priority id="3" iconUrl="https://jira.mongodb.org/images/icons/priorities/major.svg">Major - P3</priority>
                        <status id="6" iconUrl="https://jira.mongodb.org/images/icons/statuses/closed.png" description="The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.">Closed</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="3">Duplicate</resolution>
                                        <assignee username="ross@mongodb.com">Ross Lawley</assignee>
                                    <reporter username="sabari.mgn@gmail.com">Sabari Gandhi</reporter>
                        <labels>
                    </labels>
                <created>Fri, 10 Jul 2020 19:44:51 +0000</created>
                <updated>Thu, 16 Jul 2020 09:42:35 +0000</updated>
                            <resolved>Thu, 16 Jul 2020 09:42:35 +0000</resolved>
                                    <version>1.1</version>
                                                                        <votes>0</votes>
                                    <watches>3</watches>
                                                                                                                <comments>
                            <comment id="3287612" author="ross@10gen.com" created="Thu, 16 Jul 2020 09:42:25 +0000"  >&lt;p&gt;Thanks for the ticket.&lt;/p&gt;

&lt;p&gt;I&apos;m going to mark this as a duplicate of &lt;a href=&quot;https://jira.mongodb.org/browse/KAFKA-105&quot; title=&quot;Support errors.tolerance&quot; class=&quot;issue-link&quot; data-issue-key=&quot;KAFKA-105&quot;&gt;&lt;del&gt;KAFKA-105&lt;/del&gt;&lt;/a&gt; and will ensure that the scenario is tested  when errors.tolerance support is added.&lt;/p&gt;</comment>
                            <comment id="3286661" author="sabari.mgn@gmail.com" created="Wed, 15 Jul 2020 17:47:06 +0000"  >&lt;p&gt;I was able to reproduce the issue and have attached logs for both 1.1 and 1.2.&lt;/p&gt;

&lt;p&gt;Setup:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;I used the docker setup that is available in the github example.&#160;I am looking for the full document when the update is made so I have change.stream.full.document updateLookup&lt;/li&gt;
	&lt;li&gt;I have attached the MongoTest file which created a document and keeps updating the document in a loop. So you will hit the error in some time. attached logs for both 1.1 and 1.2&lt;/li&gt;
	&lt;li&gt;The connector status will still be running and it fails to resume change stream.&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;As mentioned in 1.2 logs support for errors.tolerance will ensure the error is logged and the connector is functional in such scenarios. There is a ticket open for that &lt;a href=&quot;https://jira.mongodb.org/browse/KAFKA-105&quot; class=&quot;external-link&quot; rel=&quot;nofollow&quot;&gt;https://jira.mongodb.org/browse/KAFKA-105&lt;/a&gt;&#160;. Thanks !&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;</comment>
                            <comment id="3283462" author="sabari.mgn@gmail.com" created="Mon, 13 Jul 2020 21:13:17 +0000"  >&lt;p&gt;Scenario / Use case:&lt;/p&gt;
&lt;ul&gt;
	&lt;li&gt;In my setup, I am looking for the full document when the update is made so I have change.stream.full.document updateLookup.&lt;/li&gt;
	&lt;li&gt;When any document is updated in this case appended with additional fields/information after some updates the BSON size grows and reaches the threshold.&lt;/li&gt;
	&lt;li&gt;Since the setup looks for change stream and when the scenario is met I get the above exception.&lt;/li&gt;
&lt;/ul&gt;


&lt;p&gt;I am trying to reproduce the issue locally and will update with additional details. Thanks!&lt;br/&gt;
&#160;&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                            <outwardlinks description="duplicates">
                                        <issuelink>
            <issuekey id="1341837">KAFKA-105</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                            <attachment id="270065" name="MongoTest.java" size="2588" author="sabari.mgn@gmail.com" created="Wed, 15 Jul 2020 17:37:41 +0000"/>
                            <attachment id="270063" name="mongo_1.1.txt" size="12766" author="sabari.mgn@gmail.com" created="Wed, 15 Jul 2020 17:36:43 +0000"/>
                            <attachment id="270064" name="mongo_1.2.txt" size="4080" author="sabari.mgn@gmail.com" created="Wed, 15 Jul 2020 17:36:43 +0000"/>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_15850" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                    <customfield id="customfield_12550" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>2|hxhaen:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10558" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                </customfields>
    </item>
</channel>
</rss>